commit 5783545222
Merge branch 'master' into index-lifecycle
@@ -95,7 +95,7 @@ Contributing to the Elasticsearch codebase
 JDK 10 is required to build Elasticsearch. You must have a JDK 10 installation
 with the environment variable `JAVA_HOME` referencing the path to Java home for
 your JDK 10 installation. By default, tests use the same runtime as `JAVA_HOME`.
-However, since Elasticsearch, supports JDK 8 the build supports compiling with
+However, since Elasticsearch supports JDK 8, the build supports compiling with
 JDK 10 and testing on a JDK 8 runtime; to do this, set `RUNTIME_JAVA_HOME`
 pointing to the Java home of a JDK 8 installation. Note that this mechanism can
 be used to test against other JDKs as well, this is not only limited to JDK 8.
@@ -23,6 +23,8 @@ import org.gradle.api.Action;
 import org.gradle.api.DefaultTask;
 import org.gradle.api.JavaVersion;
 import org.gradle.api.file.FileCollection;
+import org.gradle.api.logging.Logger;
+import org.gradle.api.logging.Logging;
 import org.gradle.api.tasks.Input;
 import org.gradle.api.tasks.InputFiles;
 import org.gradle.api.tasks.OutputFile;
@@ -41,6 +43,7 @@ import java.util.Set;
 
 public class ForbiddenApisCliTask extends DefaultTask {
 
+    private final Logger logger = Logging.getLogger(ForbiddenApisCliTask.class);
     private FileCollection signaturesFiles;
     private List<String> signatures = new ArrayList<>();
     private Set<String> bundledSignatures = new LinkedHashSet<>();
@@ -49,13 +52,22 @@ public class ForbiddenApisCliTask extends DefaultTask {
     private FileCollection classesDirs;
     private Action<JavaExecSpec> execAction;
 
+    @Input
     public JavaVersion getTargetCompatibility() {
         return targetCompatibility;
     }
 
     public void setTargetCompatibility(JavaVersion targetCompatibility) {
+        if (targetCompatibility.compareTo(JavaVersion.VERSION_1_10) > 0) {
+            logger.warn(
+                "Target compatibility is set to {} but forbiddenapis only supports up to 10. Will cap at 10.",
+                targetCompatibility
+            );
+            this.targetCompatibility = JavaVersion.VERSION_1_10;
+        } else {
             this.targetCompatibility = targetCompatibility;
         }
+    }
 
     public Action<JavaExecSpec> getExecAction() {
         return execAction;
@@ -685,6 +685,7 @@ public class RestHighLevelClientTests extends ESTestCase {
             "nodes.stats",
             "nodes.hot_threads",
             "nodes.usage",
+            "nodes.reload_secure_settings",
             "search_shards",
         };
         Set<String> deprecatedMethods = new HashSet<>();
@@ -17,9 +17,6 @@ package org.elasticsearch.client;/*
  * under the License.
  */
 
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
@@ -35,7 +32,6 @@ import org.elasticsearch.script.StoredScriptSource;
 
 import java.util.Collections;
 
-import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 
@@ -52,12 +48,9 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
         final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
         // TODO: change to HighLevel PutStoredScriptRequest when it will be ready
         // so far - using low-level REST API
-        Response putResponse =
-                adminClient()
-                        .performRequest("PUT", "/_scripts/calculate-score", emptyMap(),
-                                new StringEntity("{\"script\":" + script + "}",
-                                        ContentType.APPLICATION_JSON));
-        assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
+        Request putRequest = new Request("PUT", "/_scripts/calculate-score");
+        putRequest.setJsonEntity("{\"script\":" + script + "}");
+        Response putResponse = adminClient().performRequest(putRequest);
         assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
 
         GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score");
@@ -78,12 +71,9 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
         final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
         // TODO: change to HighLevel PutStoredScriptRequest when it will be ready
         // so far - using low-level REST API
-        Response putResponse =
-                adminClient()
-                        .performRequest("PUT", "/_scripts/" + id, emptyMap(),
-                                new StringEntity("{\"script\":" + script + "}",
-                                        ContentType.APPLICATION_JSON));
-        assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
+        Request putRequest = new Request("PUT", "/_scripts/" + id);
+        putRequest.setJsonEntity("{\"script\":" + script + "}");
+        Response putResponse = adminClient().performRequest(putRequest);
         assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
 
         DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest(id);
@@ -17,8 +17,6 @@ package org.elasticsearch.client.documentation;/*
  * under the License.
  */
 
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
 import org.apache.http.util.EntityUtils;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.LatchedActionListener;
@@ -27,6 +25,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptReque
 import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestHighLevelClient;
@@ -43,7 +42,6 @@ import java.util.Map;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 
-import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
 import static org.hamcrest.Matchers.equalTo;
 
@@ -193,11 +191,9 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
         final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
         // TODO: change to HighLevel PutStoredScriptRequest when it will be ready
         // so far - using low-level REST API
-        Response putResponse =
-                adminClient()
-                        .performRequest("PUT", "/_scripts/" + id, emptyMap(),
-                                new StringEntity("{\"script\":" + script + "}",
-                                        ContentType.APPLICATION_JSON));
+        Request request = new Request("PUT", "/_scripts/" + id);
+        request.setJsonEntity("{\"script\":" + script + "}");
+        Response putResponse = adminClient().performRequest(request);
         assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
         assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
     }
@@ -53,7 +53,7 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase {
 
     @After
     public void cleanupIndex() throws IOException {
-        client().performRequest("DELETE", indexName());
+        client().performRequest(new Request("DELETE", indexName()));
     }
 
     private String indexName() {
@@ -0,0 +1,55 @@
+[[cluster-nodes-reload-secure-settings]]
+== Nodes Reload Secure Settings
+
+The cluster nodes reload secure settings API is used to re-read the
+local node's encrypted keystore. Specifically, it will prompt the keystore
+decryption and reading across the cluster. The keystore's plain content is
+used to reinitialize all compatible plugins. A compatible plugin can be
+reinitialized without restarting the node. The operation is
+complete when all compatible plugins have finished reinitializing. Subsequently,
+the keystore is closed and any changes to it will not be reflected on the node.
+
+[source,js]
+--------------------------------------------------
+POST _nodes/reload_secure_settings
+POST _nodes/nodeId1,nodeId2/reload_secure_settings
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:node]
+// TEST[s/nodeId1,nodeId2/*/]
+
+The first command reloads the keystore on each node. The second selectively
+targets `nodeId1` and `nodeId2`. The node selection options are
+detailed <<cluster-nodes,here>>.
+
+Note: It is an error if secure settings are inconsistent across the cluster
+nodes, yet this consistency is not enforced whatsoever. Hence, reloading specific
+nodes is not standard. It is only justifiable when retrying failed reload operations.
+
+[float]
+[[rest-reload-secure-settings]]
+==== REST Reload Secure Settings Response
+
+The response contains the `nodes` object, which is a map, keyed by the
+node id. Each value has the node `name` and an optional `reload_exception`
+field. The `reload_exception` field is a serialization of the exception
+that was thrown during the reload process, if any.
+
+[source,js]
+--------------------------------------------------
+{
+  "_nodes": {
+    "total": 1,
+    "successful": 1,
+    "failed": 0
+  },
+  "cluster_name": "my_cluster",
+  "nodes": {
+    "pQHNt5rXTTWNvUgOrdynKg": {
+      "name": "node-0"
+    }
+  }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"my_cluster"/$body.cluster_name/]
+// TESTRESPONSE[s/"pQHNt5rXTTWNvUgOrdynKg"/\$node_name/]
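The new endpoint can also be exercised from the Java low-level REST client that the tests in this merge migrate to. A minimal sketch, assuming a cluster reachable on localhost:9200; the host, port, and printout are illustrative and not part of the commit.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class ReloadSecureSettingsExample {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // POST with no node filter reloads the keystore on every node in the cluster
            Request request = new Request("POST", "/_nodes/reload_secure_settings");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}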
@@ -4,7 +4,7 @@
 == elasticsearch-setup-passwords
 
 The `elasticsearch-setup-passwords` command sets the passwords for the built-in
-`elastic`, `kibana`, `logstash_system`, and `beats_system` users.
+`elastic`, `kibana`, `logstash_system`, `beats_system`, and `apm_system` users.
 
 [float]
 === Synopsis
@@ -0,0 +1,23 @@
+{
+  "nodes.reload_secure_settings": {
+    "documentation": "http://www.elastic.co/guide/en/elasticsearch/reference/master/cluster-nodes-reload-secure-settings.html",
+    "methods": ["POST"],
+    "url": {
+      "path": "/_nodes/reload_secure_settings",
+      "paths": ["/_nodes/reload_secure_settings", "/_nodes/{node_id}/reload_secure_settings"],
+      "parts": {
+        "node_id": {
+          "type": "list",
+          "description": "A comma-separated list of node IDs to span the reload/reinit call. Should stay empty because reloading usually involves all cluster nodes."
+        }
+      },
+      "params": {
+        "timeout": {
+          "type" : "time",
+          "description" : "Explicit operation timeout"
+        }
+      }
+    },
+    "body": null
+  }
+}
@@ -0,0 +1,8 @@
+---
+"node_reload_secure_settings test":
+
+  - do:
+      nodes.reload_secure_settings: {}
+
+  - is_true: nodes
+  - is_true: cluster_name
@@ -272,6 +272,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
             ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING,
             TransportSearchAction.SHARD_COUNT_LIMIT_SETTING,
             RemoteClusterAware.REMOTE_CLUSTERS_SEEDS,
+            RemoteClusterAware.REMOTE_CLUSTERS_PROXY,
             RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE,
             RemoteClusterService.REMOTE_CONNECTIONS_PER_CLUSTER,
             RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING,
@@ -1009,6 +1009,10 @@ public class Setting<T> implements ToXContentObject {
         return new Setting<>(key, s -> "", Function.identity(), properties);
     }
 
+    public static Setting<String> simpleString(String key, Function<String, String> parser, Property... properties) {
+        return new Setting<>(key, s -> "", parser, properties);
+    }
+
     public static Setting<String> simpleString(String key, Setting<String> fallback, Property... properties) {
         return new Setting<>(key, fallback, Function.identity(), properties);
     }
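The new overload lets a caller attach a parser that runs on every assigned value and may throw to reject it. A minimal sketch of how it might be used; the key `my.plugin.endpoint` and the host:port check are made up for illustration, only the overload itself comes from the change above.

import org.elasticsearch.common.settings.Setting;

public class EndpointSettingExample {
    // Hypothetical setting: the parser validates each value, the same pattern the
    // search.remote.*.proxy setting later in this merge uses to validate its port.
    public static final Setting<String> ENDPOINT_SETTING = Setting.simpleString("my.plugin.endpoint", value -> {
        if (value.isEmpty() == false && value.indexOf(':') < 0) {
            throw new IllegalArgumentException("expected host:port but got [" + value + "]");
        }
        return value;
    }, Setting.Property.NodeScope);
}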
@@ -468,11 +468,11 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         // commit the change
         if (defaultMappingSource != null) {
             this.defaultMappingSource = defaultMappingSource;
+            this.defaultMapper = defaultMapper;
         }
         if (newMapper != null) {
             this.mapper = newMapper;
         }
-        this.defaultMapper = defaultMapper;
         this.fieldTypes = fieldTypes;
         this.hasNested = hasNested;
         this.fullPathObjectMappers = fullPathObjectMappers;
@@ -264,7 +264,10 @@ public class TypeParsers {
     }
 
     public static FormatDateTimeFormatter parseDateTimeFormatter(Object node) {
-        return Joda.forPattern(node.toString());
+        if (node instanceof String) {
+            return Joda.forPattern((String) node);
+        }
+        throw new IllegalArgumentException("Invalid format: [" + node.toString() + "]: expected string value");
     }
 
     public static void parseTermVector(String fieldName, String termVector, FieldMapper.Builder builder) throws MapperParsingException {
@@ -251,7 +251,16 @@ public class HierarchyCircuitBreakerService extends CircuitBreakerService {
 
     //package private to allow overriding it in tests
     long currentMemoryUsage() {
+        try {
             return MEMORY_MX_BEAN.getHeapMemoryUsage().getUsed();
+        } catch (IllegalArgumentException ex) {
+            // This exception can happen (rarely) due to a race condition in the JVM when determining usage of memory pools. We do not want
+            // to fail requests because of this and thus return zero memory usage in this case. While we could also return the most
+            // recently determined memory usage, we would overestimate memory usage immediately after a garbage collection event.
+            assert ex.getMessage().matches("committed = \\d+ should be < max = \\d+");
+            logger.info("Cannot determine current memory usage due to JDK-8207200.", ex);
+            return 0;
+        }
     }
 
     /**
@@ -18,10 +18,14 @@
  */
 package org.elasticsearch.transport;
 
+import java.util.EnumSet;
 import java.util.function.Supplier;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.ClusterNameExpressionResolver;
 import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.UUIDs;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
@@ -66,6 +70,22 @@ public abstract class RemoteClusterAware extends AbstractComponent {
     public static final char REMOTE_CLUSTER_INDEX_SEPARATOR = ':';
     public static final String LOCAL_CLUSTER_GROUP_KEY = "";
 
+    /**
+     * A proxy address for the remote cluster.
+     * NOTE: this setting is undocumented until we have at least one transport that supports passing
+     * on the hostname via a mechanism like SNI.
+     */
+    public static final Setting.AffixSetting<String> REMOTE_CLUSTERS_PROXY = Setting.affixKeySetting(
+        "search.remote.",
+        "proxy",
+        key -> Setting.simpleString(key, s -> {
+            if (Strings.hasLength(s)) {
+                parsePort(s);
+            }
+            return s;
+        }, Setting.Property.NodeScope, Setting.Property.Dynamic), REMOTE_CLUSTERS_SEEDS);
+
     protected final ClusterNameExpressionResolver clusterNameResolver;
 
     /**
@@ -77,23 +97,40 @@ public abstract class RemoteClusterAware extends AbstractComponent {
         this.clusterNameResolver = new ClusterNameExpressionResolver(settings);
     }
 
-    protected static Map<String, List<Supplier<DiscoveryNode>>> buildRemoteClustersSeeds(Settings settings) {
+    /**
+     * Builds the dynamic per-cluster config from the given settings. This is a map keyed by the cluster alias that points to a tuple
+     * (ProxyAddress, [SeedNodeSuppliers]). If a cluster is configured with a proxy address all seed nodes will point to
+     * {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node.
+     */
+    protected static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(Settings settings) {
         Stream<Setting<List<String>>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings);
         return allConcreteSettings.collect(
                 Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> {
                     String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting);
                     List<String> addresses = concreteSetting.get(settings);
+                    final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).exists(settings);
                     List<Supplier<DiscoveryNode>> nodes = new ArrayList<>(addresses.size());
                     for (String address : addresses) {
-                        nodes.add(() -> {
+                        nodes.add(() -> buildSeedNode(clusterName, address, proxyMode));
+                    }
+                    return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes);
+                }));
+    }
+
+    static DiscoveryNode buildSeedNode(String clusterName, String address, boolean proxyMode) {
+        if (proxyMode) {
+            TransportAddress transportAddress = new TransportAddress(TransportAddress.META_ADDRESS, 0);
+            String hostName = address.substring(0, indexOfPortSeparator(address));
+            return new DiscoveryNode("", clusterName + "#" + address, UUIDs.randomBase64UUID(), hostName, address,
+                    transportAddress, Collections
+                    .emptyMap(), EnumSet.allOf(DiscoveryNode.Role.class),
+                    Version.CURRENT.minimumCompatibilityVersion());
+        } else {
             TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address));
             return new DiscoveryNode(clusterName + "#" + transportAddress.toString(),
                     transportAddress,
                     Version.CURRENT.minimumCompatibilityVersion());
-                        });
         }
-                    return nodes;
-                }));
     }
 
     /**
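For orientation, a hedged sketch of the node settings this parsing consumes. The cluster alias `cluster_one` and the addresses are placeholders; only the `search.remote.<alias>.seeds` and `search.remote.<alias>.proxy` keys come from the change above.

import org.elasticsearch.common.settings.Settings;

public class RemoteClusterProxyExample {
    public static void main(String[] args) {
        // With a proxy configured, buildRemoteClustersDynamicConfig maps "cluster_one" to a Tuple of
        // the proxy address and seed-node suppliers that keep the configured hostnames (e.g. for SNI).
        Settings settings = Settings.builder()
            .putList("search.remote.cluster_one.seeds", "10.0.0.1:9300", "10.0.0.2:9300")
            .put("search.remote.cluster_one.proxy", "sni-proxy.example.net:9300")
            .build();
        System.out.println(settings.getAsList("search.remote.cluster_one.seeds"));
    }
}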
@@ -138,20 +175,24 @@ public abstract class RemoteClusterAware extends AbstractComponent {
 
     protected abstract Set<String> getRemoteClusterNames();
 
+
     /**
      * Subclasses must implement this to receive information about updated cluster aliases. If the given address list is
      * empty the cluster alias is unregistered and should be removed.
      */
-    protected abstract void updateRemoteCluster(String clusterAlias, List<String> addresses);
+    protected abstract void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxy);
 
     /**
      * Registers this instance to listen to updates on the cluster settings.
      */
     public void listenForUpdates(ClusterSettings clusterSettings) {
-        clusterSettings.addAffixUpdateConsumer(RemoteClusterAware.REMOTE_CLUSTERS_SEEDS, this::updateRemoteCluster,
+        clusterSettings.addAffixUpdateConsumer(RemoteClusterAware.REMOTE_CLUSTERS_PROXY,
+            RemoteClusterAware.REMOTE_CLUSTERS_SEEDS,
+            (key, value) -> updateRemoteCluster(key, value.v2(), value.v1()),
             (namespace, value) -> {});
     }
 
+
     protected static InetSocketAddress parseSeedAddress(String remoteHost) {
         String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost));
         InetAddress hostAddress;
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.transport;
 
+import java.net.InetSocketAddress;
 import java.util.function.Supplier;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.lucene.store.AlreadyClosedException;
@@ -88,6 +89,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
     private final int maxNumRemoteConnections;
     private final Predicate<DiscoveryNode> nodePredicate;
     private final ThreadPool threadPool;
+    private volatile String proxyAddress;
     private volatile List<Supplier<DiscoveryNode>> seedNodes;
     private volatile boolean skipUnavailable;
     private final ConnectHandler connectHandler;
@@ -106,6 +108,13 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
     RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
             TransportService transportService, ConnectionManager connectionManager, int maxNumRemoteConnections,
             Predicate<DiscoveryNode> nodePredicate) {
+        this(settings, clusterAlias, seedNodes, transportService, connectionManager, maxNumRemoteConnections, nodePredicate, null);
+    }
+
+    RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
+            TransportService transportService, ConnectionManager connectionManager, int maxNumRemoteConnections, Predicate<DiscoveryNode>
+                    nodePredicate,
+            String proxyAddress) {
         super(settings);
         this.transportService = transportService;
         this.maxNumRemoteConnections = maxNumRemoteConnections;
@@ -130,13 +139,26 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
         connectionManager.addListener(this);
         // we register the transport service here as a listener to make sure we notify handlers on disconnect etc.
         connectionManager.addListener(transportService);
+        this.proxyAddress = proxyAddress;
+    }
+
+    private static DiscoveryNode maybeAddProxyAddress(String proxyAddress, DiscoveryNode node) {
+        if (proxyAddress == null || proxyAddress.isEmpty()) {
+            return node;
+        } else {
+            // resolve the proxy address lazily here
+            InetSocketAddress proxyInetAddress = RemoteClusterAware.parseSeedAddress(proxyAddress);
+            return new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), node.getHostName(), node
+                    .getHostAddress(), new TransportAddress(proxyInetAddress), node.getAttributes(), node.getRoles(), node.getVersion());
+        }
     }
 
     /**
      * Updates the list of seed nodes for this cluster connection
      */
-    synchronized void updateSeedNodes(List<Supplier<DiscoveryNode>> seedNodes, ActionListener<Void> connectListener) {
+    synchronized void updateSeedNodes(String proxyAddress, List<Supplier<DiscoveryNode>> seedNodes, ActionListener<Void> connectListener) {
         this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes));
+        this.proxyAddress = proxyAddress;
         connectHandler.connect(connectListener);
     }
 
@@ -281,6 +303,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
         return new ProxyConnection(connection, remoteClusterNode);
     }
 
+
     static final class ProxyConnection implements Transport.Connection {
         private final Transport.Connection proxyConnection;
         private final DiscoveryNode targetNode;
@@ -461,7 +484,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
             try {
                 if (seedNodes.hasNext()) {
                     cancellableThreads.executeIO(() -> {
-                        final DiscoveryNode seedNode = seedNodes.next().get();
+                        final DiscoveryNode seedNode = maybeAddProxyAddress(proxyAddress, seedNodes.next().get());
                         final TransportService.HandshakeResponse handshakeResponse;
                         Transport.Connection connection = manager.openConnection(seedNode,
                             ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null));
@@ -476,7 +499,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
                             throw ex;
                         }
 
-                        final DiscoveryNode handshakeNode = handshakeResponse.getDiscoveryNode();
+                        final DiscoveryNode handshakeNode = maybeAddProxyAddress(proxyAddress, handshakeResponse.getDiscoveryNode());
                         if (nodePredicate.test(handshakeNode) && connectedNodes.size() < maxNumRemoteConnections) {
                             manager.connectToNode(handshakeNode, remoteProfile, transportService.connectionValidator(handshakeNode));
                             if (remoteClusterName.get() == null) {
@@ -583,7 +606,8 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
                     cancellableThreads.executeIO(() -> {
                         DiscoveryNodes nodes = response.getState().nodes();
                         Iterable<DiscoveryNode> nodesIter = nodes.getNodes()::valuesIt;
-                        for (DiscoveryNode node : nodesIter) {
+                        for (DiscoveryNode n : nodesIter) {
+                            DiscoveryNode node = maybeAddProxyAddress(proxyAddress, n);
                             if (nodePredicate.test(node) && connectedNodes.size() < maxNumRemoteConnections) {
                                 try {
                                     connectionManager.connectToNode(node, remoteProfile,
@@ -646,7 +670,8 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
      * Get the information about remote nodes to be rendered on {@code _remote/info} requests.
      */
     public RemoteConnectionInfo getConnectionInfo() {
-        List<TransportAddress> seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect(Collectors.toList());
+        List<TransportAddress> seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect
+            (Collectors.toList());
         TimeValue initialConnectionTimeout = RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings);
         return new RemoteConnectionInfo(clusterAlias, seedNodeAddresses, maxNumRemoteConnections, connectedNodes.size(),
             initialConnectionTimeout, skipUnavailable);
@@ -31,10 +31,10 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.Booleans;
 import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.transport.TransportAddress;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.CountDown;
 import org.elasticsearch.core.internal.io.IOUtils;
@@ -116,7 +116,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
      * @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes
      * @param connectionListener a listener invoked once every configured cluster has been connected to
      */
-    private synchronized void updateRemoteClusters(Map<String, List<Supplier<DiscoveryNode>>> seeds,
+    private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds,
                                                    ActionListener<Void> connectionListener) {
         if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) {
             throw new IllegalArgumentException("remote clusters must not have the empty string as its key");
@@ -127,9 +127,12 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
         } else {
             CountDown countDown = new CountDown(seeds.size());
             remoteClusters.putAll(this.remoteClusters);
-            for (Map.Entry<String, List<Supplier<DiscoveryNode>>> entry : seeds.entrySet()) {
+            for (Map.Entry<String, Tuple<String, List<Supplier<DiscoveryNode>>>> entry : seeds.entrySet()) {
+                List<Supplier<DiscoveryNode>> seedList = entry.getValue().v2();
+                String proxyAddress = entry.getValue().v1();
+
                 RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey());
-                if (entry.getValue().isEmpty()) { // with no seed nodes we just remove the connection
+                if (seedList.isEmpty()) { // with no seed nodes we just remove the connection
                     try {
                         IOUtils.close(remote);
                     } catch (IOException e) {
@@ -140,15 +143,15 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
                 }
 
                 if (remote == null) { // this is a new cluster we have to add a new representation
-                    remote = new RemoteClusterConnection(settings, entry.getKey(), entry.getValue(), transportService,
+                    remote = new RemoteClusterConnection(settings, entry.getKey(), seedList, transportService,
                         new ConnectionManager(settings, transportService.transport, transportService.threadPool), numRemoteConnections,
-                        getNodePredicate(settings));
+                        getNodePredicate(settings), proxyAddress);
                     remoteClusters.put(entry.getKey(), remote);
                 }
 
                 // now update the seed nodes no matter if it's new or already existing
                 RemoteClusterConnection finalRemote = remote;
-                remote.updateSeedNodes(entry.getValue(), ActionListener.wrap(
+                remote.updateSeedNodes(proxyAddress, seedList, ActionListener.wrap(
                     response -> {
                         if (countDown.countDown()) {
                             connectionListener.onResponse(response);
@@ -302,8 +305,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
     @Override
     public void listenForUpdates(ClusterSettings clusterSettings) {
         super.listenForUpdates(clusterSettings);
-        clusterSettings.addAffixUpdateConsumer(REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable,
-            (clusterAlias, value) -> {});
+        clusterSettings.addAffixUpdateConsumer(REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable, (alias, value) -> {});
     }
 
     synchronized void updateSkipUnavailable(String clusterAlias, Boolean skipUnavailable) {
@@ -313,22 +315,21 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
         }
     }
 
 
     @Override
-    protected void updateRemoteCluster(String clusterAlias, List<String> addresses) {
-        updateRemoteCluster(clusterAlias, addresses, ActionListener.wrap((x) -> {}, (x) -> {}));
+    protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxyAddress) {
+        updateRemoteCluster(clusterAlias, addresses, proxyAddress, ActionListener.wrap((x) -> {}, (x) -> {}));
     }
 
     void updateRemoteCluster(
             final String clusterAlias,
             final List<String> addresses,
+            final String proxyAddress,
             final ActionListener<Void> connectionListener) {
-        final List<Supplier<DiscoveryNode>> nodes = addresses.stream().<Supplier<DiscoveryNode>>map(address -> () -> {
-            final TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address));
-            final String id = clusterAlias + "#" + transportAddress.toString();
-            final Version version = Version.CURRENT.minimumCompatibilityVersion();
-            return new DiscoveryNode(id, transportAddress, version);
-        }).collect(Collectors.toList());
-        updateRemoteClusters(Collections.singletonMap(clusterAlias, nodes), connectionListener);
+        final List<Supplier<DiscoveryNode>> nodes = addresses.stream().<Supplier<DiscoveryNode>>map(address -> () ->
+            buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress))
+        ).collect(Collectors.toList());
+        updateRemoteClusters(Collections.singletonMap(clusterAlias, new Tuple<>(proxyAddress, nodes)), connectionListener);
     }
 
     /**
@@ -338,7 +339,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
     void initializeRemoteClusters() {
         final TimeValue timeValue = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings);
         final PlainActionFuture<Void> future = new PlainActionFuture<>();
-        Map<String, List<Supplier<DiscoveryNode>>> seeds = RemoteClusterAware.buildRemoteClustersSeeds(settings);
+        Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds = RemoteClusterAware.buildRemoteClustersDynamicConfig(settings);
         updateRemoteClusters(seeds, future);
         try {
             future.get(timeValue.millis(), TimeUnit.MILLISECONDS);
@@ -16,12 +16,15 @@
  * specific language governing permissions and limitations
  * under the License.
  */
 
 package org.elasticsearch.cluster.metadata;
 
 import org.elasticsearch.action.admin.indices.mapping.put.PutMappingClusterStateUpdateRequest;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.ClusterStateTaskExecutor;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.compress.CompressedXContent;
+import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -31,6 +34,7 @@ import java.util.Collection;
 import java.util.Collections;
 
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
 
 public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {
 
@@ -47,8 +51,18 @@ public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {
         final ClusterService clusterService = getInstanceFromNode(ClusterService.class);
         // TODO - it will be nice to get a random mapping generator
         final PutMappingClusterStateUpdateRequest request = new PutMappingClusterStateUpdateRequest().type("type");
-        request.source("{ \"properties\" { \"field\": { \"type\": \"text\" }}}");
+        request.indices(new Index[] {indexService.index()});
+        request.source("{ \"properties\": { \"field\": { \"type\": \"text\" }}}");
+        final ClusterStateTaskExecutor.ClusterTasksResult<PutMappingClusterStateUpdateRequest> result =
             mappingService.putMappingExecutor.execute(clusterService.state(), Collections.singletonList(request));
+        // the task completed successfully
+        assertThat(result.executionResults.size(), equalTo(1));
+        assertTrue(result.executionResults.values().iterator().next().isSuccess());
+        // the task really was a mapping update
+        assertThat(
+            indexService.mapperService().documentMapper("type").mappingSource(),
+            not(equalTo(result.resultingState.metaData().index("test").mapping("type").source())));
+        // since we never committed the cluster state update, the in-memory state is unchanged
         assertThat(indexService.mapperService().documentMapper("type").mappingSource(), equalTo(currentMapping));
     }
 
@@ -69,4 +83,5 @@ public class MetaDataMappingServiceTests extends ESSingleNodeTestCase {
 
         assertSame(result, result2);
     }
+
 }
@@ -109,6 +109,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
     protected void beforeIndexDeletion() throws Exception {
         if (disableBeforeIndexDeletion == false) {
             super.beforeIndexDeletion();
+            assertSeqNos();
         }
     }
 
@@ -414,4 +414,22 @@ public class DateFieldMapperTests extends ESSingleNodeTestCase {
                 () -> mapper.merge(update.mapping()));
         assertEquals("mapper [date] of different type, current_type [date], merged_type [text]", e.getMessage());
     }
+
+    public void testIllegalFormatField() throws Exception {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder()
+                .startObject()
+                    .startObject("type")
+                        .startObject("properties")
+                            .startObject("field")
+                                .field("type", "date")
+                                .array("format", "test_format")
+                            .endObject()
+                        .endObject()
+                    .endObject()
+                .endObject());
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> parser.parse("type", new CompressedXContent(mapping)));
+        assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
+    }
 }
@ -21,13 +21,16 @@ package org.elasticsearch.index.mapper;
|
||||||
|
|
||||||
import org.elasticsearch.ExceptionsHelper;
|
import org.elasticsearch.ExceptionsHelper;
|
||||||
 import org.elasticsearch.Version;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.compress.CompressedXContent;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
 import org.elasticsearch.index.mapper.MapperService.MergeReason;
@@ -119,6 +122,35 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
         assertNull(indexService.mapperService().documentMapper(MapperService.DEFAULT_MAPPING));
     }
 
+    public void testIndexMetaDataUpdateDoesNotLoseDefaultMapper() throws IOException {
+        final IndexService indexService =
+                createIndex("test", Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_3_0).build());
+        try (XContentBuilder builder = JsonXContent.contentBuilder()) {
+            builder.startObject();
+            {
+                builder.startObject(MapperService.DEFAULT_MAPPING);
+                {
+                    builder.field("date_detection", false);
+                }
+                builder.endObject();
+            }
+            builder.endObject();
+            final PutMappingRequest putMappingRequest = new PutMappingRequest();
+            putMappingRequest.indices("test");
+            putMappingRequest.type(MapperService.DEFAULT_MAPPING);
+            putMappingRequest.source(builder);
+            client().admin().indices().preparePutMapping("test").setType(MapperService.DEFAULT_MAPPING).setSource(builder).get();
+        }
+        assertNotNull(indexService.mapperService().documentMapper(MapperService.DEFAULT_MAPPING));
+        final Settings zeroReplicasSettings = Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0).build();
+        client().admin().indices().prepareUpdateSettings("test").setSettings(zeroReplicasSettings).get();
+        /*
+         * This assertion is a guard against a previous bug that would lose the default mapper when applying a metadata update that did not
+         * update the default mapping.
+         */
+        assertNotNull(indexService.mapperService().documentMapper(MapperService.DEFAULT_MAPPING));
+    }
+
     public void testTotalFieldsExceedsLimit() throws Throwable {
         Function<String, String> mapping = type -> {
             try {
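The builder chain in the new test is easier to read as the JSON it produces. A minimal, illustrative sketch (assumes `MapperService.DEFAULT_MAPPING` resolves to `_default_`; not part of the change itself):

[source,java]
--------------------------------------------------
// Illustrative only: the mapping update that testIndexMetaDataUpdateDoesNotLoseDefaultMapper
// builds with XContentBuilder, written out as a plain string.
String defaultMappingJson = "{\"_default_\":{\"date_detection\":false}}";
--------------------------------------------------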
@@ -443,4 +443,22 @@ public class RangeFieldMapperTests extends AbstractNumericFieldMapperTestCase {
         }
     }
 
+    public void testIllegalFormatField() throws Exception {
+        String mapping = Strings.toString(XContentFactory.jsonBuilder()
+            .startObject()
+                .startObject("type")
+                    .startObject("properties")
+                        .startObject("field")
+                            .field("type", "date_range")
+                            .array("format", "test_format")
+                        .endObject()
+                    .endObject()
+                .endObject()
+            .endObject());
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> parser.parse("type", new CompressedXContent(mapping)));
+        assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
+    }
+
 }
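As above, the nested builder calls are clearer when flattened to the JSON they emit. A minimal sketch, derived directly from the builder calls in the new test:

[source,java]
--------------------------------------------------
// Illustrative only: the date_range mapping with the invalid (array-valued) format field.
String illegalFormatMapping =
    "{\"type\":{\"properties\":{\"field\":{\"type\":\"date_range\",\"format\":[\"test_format\"]}}}}";
--------------------------------------------------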
@@ -159,4 +159,30 @@ public class RootObjectMapperTests extends ESSingleNodeTestCase {
         mapper = mapperService.merge("type", new CompressedXContent(mapping3), MergeReason.MAPPING_UPDATE);
         assertEquals(mapping3, mapper.mappingSource().toString());
     }
 
+    public void testIllegalFormatField() throws Exception {
+        String dynamicMapping = Strings.toString(XContentFactory.jsonBuilder()
+            .startObject()
+                .startObject("type")
+                    .startArray("dynamic_date_formats")
+                        .startArray().value("test_format").endArray()
+                    .endArray()
+                .endObject()
+            .endObject());
+        String mapping = Strings.toString(XContentFactory.jsonBuilder()
+            .startObject()
+                .startObject("type")
+                    .startArray("date_formats")
+                        .startArray().value("test_format").endArray()
+                    .endArray()
+                .endObject()
+            .endObject());
+
+        DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
+        for (String m : Arrays.asList(mapping, dynamicMapping)) {
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+                () -> parser.parse("type", new CompressedXContent(m)));
+            assertEquals("Invalid format: [[test_format]]: expected string value", e.getMessage());
+        }
+    }
 }
@@ -18,6 +18,8 @@
  */
 package org.elasticsearch.transport;
 
+import java.util.HashMap;
+import java.util.Map;
 import java.util.function.Supplier;
 import org.apache.lucene.store.AlreadyClosedException;
 import org.elasticsearch.Version;
@@ -52,6 +54,7 @@ import org.elasticsearch.mocksocket.MockServerSocket;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.VersionUtils;
 import org.elasticsearch.test.transport.MockTransportService;
+import org.elasticsearch.test.transport.StubbableTransport;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -378,15 +381,19 @@ public class RemoteClusterConnectionTests extends ESTestCase {
             }
         }
     }
 
     private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes) throws Exception {
+        updateSeedNodes(connection, seedNodes, null);
+    }
+
+    private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes, String proxyAddress)
+        throws Exception {
         CountDownLatch latch = new CountDownLatch(1);
         AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
         ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> {
             exceptionAtomicReference.set(x);
             latch.countDown();
         });
-        connection.updateSeedNodes(seedNodes, listener);
+        connection.updateSeedNodes(proxyAddress, seedNodes, listener);
         latch.await();
         if (exceptionAtomicReference.get() != null) {
             throw exceptionAtomicReference.get();
@@ -517,7 +524,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
                 exceptionReference.set(x);
                 listenerCalled.countDown();
             });
-            connection.updateSeedNodes(Arrays.asList(() -> seedNode), listener);
+            connection.updateSeedNodes(null, Arrays.asList(() -> seedNode), listener);
             acceptedLatch.await();
             connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on
             assertTrue(connection.assertNoRunningConnections());
@@ -787,7 +794,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
                         throw new AssertionError(x);
                     }
                 });
-                connection.updateSeedNodes(seedNodes, listener);
+                connection.updateSeedNodes(null, seedNodes, listener);
             }
             latch.await();
         } catch (Exception ex) {
@@ -875,7 +882,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
                 }
             });
             try {
-                connection.updateSeedNodes(seedNodes, listener);
+                connection.updateSeedNodes(null, seedNodes, listener);
             } catch (Exception e) {
                 // it's ok if we're shutting down
                 assertThat(e.getMessage(), containsString("threadcontext is already closed"));
@@ -1384,4 +1391,97 @@ public class RemoteClusterConnectionTests extends ESTestCase {
             }
         }
     }
 
+    public void testProxyMode() throws Exception {
+        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
+        try (MockTransportService seedTransport = startTransport("node_0", knownNodes, Version.CURRENT);
+             MockTransportService discoverableTransport = startTransport("node_1", knownNodes, Version.CURRENT)) {
+            knownNodes.add(seedTransport.getLocalDiscoNode());
+            knownNodes.add(discoverableTransport.getLocalDiscoNode());
+            Collections.shuffle(knownNodes, random());
+            final String proxyAddress = "1.1.1.1:99";
+            Map<String, DiscoveryNode> nodes = new HashMap<>();
+            nodes.put("node_0", seedTransport.getLocalDiscoNode());
+            nodes.put("node_1", discoverableTransport.getLocalDiscoNode());
+            Transport mockTcpTransport = getProxyTransport(threadPool, Collections.singletonMap(proxyAddress, nodes));
+            try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, mockTcpTransport, Version.CURRENT,
+                threadPool, null, Collections.emptySet())) {
+                service.start();
+                service.acceptIncomingRequests();
+                Supplier<DiscoveryNode> seedSupplier = () ->
+                    RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true);
+                try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
+                    Arrays.asList(seedSupplier), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true, proxyAddress)) {
+                    updateSeedNodes(connection, Arrays.asList(seedSupplier), proxyAddress);
+                    assertEquals(2, connection.getNumNodesConnected());
+                    assertNotNull(connection.getConnection(discoverableTransport.getLocalDiscoNode()));
+                    assertNotNull(connection.getConnection(seedTransport.getLocalDiscoNode()));
+                    assertEquals(proxyAddress, connection.getConnection(seedTransport.getLocalDiscoNode())
+                        .getNode().getAddress().toString());
+                    assertEquals(proxyAddress, connection.getConnection(discoverableTransport.getLocalDiscoNode())
+                        .getNode().getAddress().toString());
+                    service.getConnectionManager().disconnectFromNode(knownNodes.get(0));
+                    // ensure we reconnect
+                    assertBusy(() -> {
+                        assertEquals(2, connection.getNumNodesConnected());
+                    });
+                    discoverableTransport.close();
+                    seedTransport.close();
+                }
+            }
+        }
+    }
+
+    public static Transport getProxyTransport(ThreadPool threadPool, Map<String, Map<String, DiscoveryNode>> nodeMap) {
+        if (nodeMap.isEmpty()) {
+            throw new IllegalArgumentException("nodeMap must be non-empty");
+        }
+
+        StubbableTransport stubbableTransport = new StubbableTransport(MockTransportService.newMockTransport(Settings.EMPTY, Version
+            .CURRENT, threadPool));
+        stubbableTransport.setDefaultConnectBehavior((t, node, profile) -> {
+            Map<String, DiscoveryNode> proxyMapping = nodeMap.get(node.getAddress().toString());
+            if (proxyMapping == null) {
+                throw new IllegalStateException("no proxy mapping for node: " + node);
+            }
+            DiscoveryNode proxyNode = proxyMapping.get(node.getName());
+            if (proxyNode == null) {
+                // this is a seednode - lets pick one randomly
+                assertEquals("seed node must not have a port in the hostname: " + node.getHostName(),
+                    -1, node.getHostName().lastIndexOf(':'));
+                assertTrue("missing hostname: " + node, proxyMapping.containsKey(node.getHostName()));
+                // route by seed hostname
+                proxyNode = proxyMapping.get(node.getHostName());
+            }
+            Transport.Connection connection = t.openConnection(proxyNode, profile);
+            return new Transport.Connection() {
+                @Override
+                public DiscoveryNode getNode() {
+                    return node;
+                }
+
+                @Override
+                public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
+                    throws IOException, TransportException {
+                    connection.sendRequest(requestId, action, request, options);
+                }
+
+                @Override
+                public void addCloseListener(ActionListener<Void> listener) {
+                    connection.addCloseListener(listener);
+                }
+
+                @Override
+                public boolean isClosed() {
+                    return connection.isClosed();
+                }
+
+                @Override
+                public void close() {
+                    connection.close();
+                }
+            };
+        });
+        return stubbableTransport;
+    }
 }
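The subtle part of `getProxyTransport` above is the routing convention: a node that has already been discovered is looked up by its name, while a seed node (not yet discovered) is looked up by the hostname the seed supplier encoded. A condensed sketch of just that lookup order, taken from the connect behavior above (illustrative, assumes the same `proxyMapping` map):

[source,java]
--------------------------------------------------
// Prefer the node name; fall back to the seed hostname when the node has not been discovered yet.
DiscoveryNode proxyNode = proxyMapping.get(node.getName());
if (proxyNode == null) {
    proxyNode = proxyMapping.get(node.getHostName()); // seed nodes are keyed by hostname
}
--------------------------------------------------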
@@ -26,6 +26,7 @@ import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.node.DiscoveryNode;
+import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.settings.AbstractScopedSettings;
 import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Settings;
@@ -55,6 +56,7 @@ import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.BiFunction;
 import java.util.function.Predicate;
+import java.util.stream.Collectors;
 
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.instanceOf;
@@ -115,25 +117,38 @@ public class RemoteClusterServiceTests extends ESTestCase {
         assertEquals("failed to parse port", e.getMessage());
     }
 
-    public void testBuiltRemoteClustersSeeds() throws Exception {
-        Map<String, List<Supplier<DiscoveryNode>>> map = RemoteClusterService.buildRemoteClustersSeeds(
-            Settings.builder().put("search.remote.foo.seeds", "192.168.0.1:8080").put("search.remote.bar.seeds", "[::1]:9090").build());
-        assertEquals(2, map.size());
+    public void testBuildRemoteClustersDynamicConfig() throws Exception {
+        Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig(
+            Settings.builder().put("search.remote.foo.seeds", "192.168.0.1:8080")
+                .put("search.remote.bar.seeds", "[::1]:9090")
+                .put("search.remote.boom.seeds", "boom-node1.internal:1000")
+                .put("search.remote.boom.proxy", "foo.bar.com:1234").build());
+        assertEquals(3, map.size());
         assertTrue(map.containsKey("foo"));
         assertTrue(map.containsKey("bar"));
-        assertEquals(1, map.get("foo").size());
-        assertEquals(1, map.get("bar").size());
-        DiscoveryNode foo = map.get("foo").get(0).get();
+        assertTrue(map.containsKey("boom"));
+        assertEquals(1, map.get("foo").v2().size());
+        assertEquals(1, map.get("bar").v2().size());
+        assertEquals(1, map.get("boom").v2().size());
+
+        DiscoveryNode foo = map.get("foo").v2().get(0).get();
+        assertEquals("", map.get("foo").v1());
         assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080)));
         assertEquals(foo.getId(), "foo#192.168.0.1:8080");
         assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
 
-        DiscoveryNode bar = map.get("bar").get(0).get();
+        DiscoveryNode bar = map.get("bar").v2().get(0).get();
         assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090)));
         assertEquals(bar.getId(), "bar#[::1]:9090");
+        assertEquals("", map.get("bar").v1());
         assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
+
+        DiscoveryNode boom = map.get("boom").v2().get(0).get();
+        assertEquals(boom.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0));
+        assertEquals("boom-node1.internal", boom.getHostName());
+        assertEquals(boom.getId(), "boom#boom-node1.internal:1000");
+        assertEquals("foo.bar.com:1234", map.get("boom").v1());
+        assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
     }
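The renamed method now pairs each cluster alias with its proxy string (empty when no `search.remote.<alias>.proxy` is set) and its seed-node suppliers. A minimal sketch of consuming that return value, using the same `Tuple`/`Supplier` types as the test above (the `settings` variable is assumed to be in scope; illustrative only):

[source,java]
--------------------------------------------------
Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> config =
    RemoteClusterService.buildRemoteClustersDynamicConfig(settings);
for (Map.Entry<String, Tuple<String, List<Supplier<DiscoveryNode>>>> e : config.entrySet()) {
    String alias = e.getKey();                        // e.g. "boom"
    String proxy = e.getValue().v1();                 // "" when no proxy is configured for the alias
    List<Supplier<DiscoveryNode>> seeds = e.getValue().v2();
}
--------------------------------------------------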
@@ -204,17 +219,17 @@ public class RemoteClusterServiceTests extends ESTestCase {
                 assertFalse(service.isCrossClusterSearchEnabled());
                 service.initializeRemoteClusters();
                 assertFalse(service.isCrossClusterSearchEnabled());
-                service.updateRemoteCluster("cluster_1", Collections.singletonList(seedNode.getAddress().toString()));
+                service.updateRemoteCluster("cluster_1", Collections.singletonList(seedNode.getAddress().toString()), null);
                 assertTrue(service.isCrossClusterSearchEnabled());
                 assertTrue(service.isRemoteClusterRegistered("cluster_1"));
-                service.updateRemoteCluster("cluster_2", Collections.singletonList(otherSeedNode.getAddress().toString()));
+                service.updateRemoteCluster("cluster_2", Collections.singletonList(otherSeedNode.getAddress().toString()), null);
                 assertTrue(service.isCrossClusterSearchEnabled());
                 assertTrue(service.isRemoteClusterRegistered("cluster_1"));
                 assertTrue(service.isRemoteClusterRegistered("cluster_2"));
-                service.updateRemoteCluster("cluster_2", Collections.emptyList());
+                service.updateRemoteCluster("cluster_2", Collections.emptyList(), null);
                 assertFalse(service.isRemoteClusterRegistered("cluster_2"));
                 IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
-                    () -> service.updateRemoteCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Collections.emptyList()));
+                    () -> service.updateRemoteCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Collections.emptyList(), null));
                 assertEquals("remote clusters must not have the empty string as its key", iae.getMessage());
             }
         }
@@ -265,14 +280,14 @@ public class RemoteClusterServiceTests extends ESTestCase {
                 final CountDownLatch firstLatch = new CountDownLatch(1);
                 service.updateRemoteCluster(
                     "cluster_1",
-                    Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()),
+                    Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), null,
                     connectionListener(firstLatch));
                 firstLatch.await();
 
                 final CountDownLatch secondLatch = new CountDownLatch(1);
                 service.updateRemoteCluster(
                     "cluster_2",
-                    Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()),
+                    Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), null,
                     connectionListener(secondLatch));
                 secondLatch.await();
@@ -330,14 +345,14 @@ public class RemoteClusterServiceTests extends ESTestCase {
                 final CountDownLatch firstLatch = new CountDownLatch(1);
                 service.updateRemoteCluster(
                     "cluster_1",
-                    Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()),
+                    Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), null,
                     connectionListener(firstLatch));
                 firstLatch.await();
 
                 final CountDownLatch secondLatch = new CountDownLatch(1);
                 service.updateRemoteCluster(
                     "cluster_2",
-                    Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()),
+                    Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), null,
                     connectionListener(secondLatch));
                 secondLatch.await();
@@ -403,14 +418,14 @@ public class RemoteClusterServiceTests extends ESTestCase {
                 final CountDownLatch firstLatch = new CountDownLatch(1);
                 service.updateRemoteCluster(
                     "cluster_1",
-                    Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()),
+                    Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), null,
                     connectionListener(firstLatch));
                 firstLatch.await();
 
                 final CountDownLatch secondLatch = new CountDownLatch(1);
                 service.updateRemoteCluster(
                     "cluster_2",
-                    Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()),
+                    Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), null,
                     connectionListener(secondLatch));
                 secondLatch.await();
                 CountDownLatch latch = new CountDownLatch(1);
@@ -822,4 +837,76 @@ public class RemoteClusterServiceTests extends ESTestCase {
             assertTrue(nodePredicate.test(node));
         }
     }
 
+    public void testRemoteClusterWithProxy() throws Exception {
+        List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
+        try (MockTransportService cluster_1_node0 = startTransport("cluster_1_node0", knownNodes, Version.CURRENT);
+             MockTransportService cluster_1_node_1 = startTransport("cluster_1_node1", knownNodes, Version.CURRENT);
+             MockTransportService cluster_2_node0 = startTransport("cluster_2_node0", Collections.emptyList(), Version.CURRENT)) {
+            knownNodes.add(cluster_1_node0.getLocalDiscoNode());
+            knownNodes.add(cluster_1_node_1.getLocalDiscoNode());
+            String cluster1Proxy = "1.1.1.1:99";
+            String cluster2Proxy = "2.2.2.2:99";
+            Map<String, DiscoveryNode> nodesCluster1 = new HashMap<>();
+            nodesCluster1.put("cluster_1_node0", cluster_1_node0.getLocalDiscoNode());
+            nodesCluster1.put("cluster_1_node1", cluster_1_node_1.getLocalDiscoNode());
+            Map<String, Map<String, DiscoveryNode>> mapping = new HashMap<>();
+            mapping.put(cluster1Proxy, nodesCluster1);
+            mapping.put(cluster2Proxy, Collections.singletonMap("cluster_2_node0", cluster_2_node0.getLocalDiscoNode()));
+
+            Collections.shuffle(knownNodes, random());
+            Transport proxyTransport = RemoteClusterConnectionTests.getProxyTransport(threadPool, mapping);
+            try (MockTransportService transportService = MockTransportService.createNewService(Settings.EMPTY, proxyTransport,
+                Version.CURRENT, threadPool, null, Collections.emptySet());) {
+                transportService.start();
+                transportService.acceptIncomingRequests();
+                Settings.Builder builder = Settings.builder();
+                builder.putList("search.remote.cluster_1.seeds", "cluster_1_node0:8080");
+                builder.put("search.remote.cluster_1.proxy", cluster1Proxy);
+                try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
+                    assertFalse(service.isCrossClusterSearchEnabled());
+                    service.initializeRemoteClusters();
+                    assertTrue(service.isCrossClusterSearchEnabled());
+                    updateRemoteCluster(service, "cluster_1", Collections.singletonList("cluster_1_node1:8081"), cluster1Proxy);
+                    assertTrue(service.isCrossClusterSearchEnabled());
+                    assertTrue(service.isRemoteClusterRegistered("cluster_1"));
+                    assertFalse(service.isRemoteClusterRegistered("cluster_2"));
+                    updateRemoteCluster(service, "cluster_2", Collections.singletonList("cluster_2_node0:9300"), cluster2Proxy);
+                    assertTrue(service.isCrossClusterSearchEnabled());
+                    assertTrue(service.isRemoteClusterRegistered("cluster_1"));
+                    assertTrue(service.isRemoteClusterRegistered("cluster_2"));
+                    List<RemoteConnectionInfo> infos = service.getRemoteConnectionInfos().collect(Collectors.toList());
+                    for (RemoteConnectionInfo info : infos) {
+                        switch (info.clusterAlias) {
+                            case "cluster_1":
+                                assertEquals(2, info.numNodesConnected);
+                                break;
+                            case "cluster_2":
+                                assertEquals(1, info.numNodesConnected);
+                                break;
+                            default:
+                                fail("unknown cluster: " + info.clusterAlias);
+                        }
+                    }
+                    service.updateRemoteCluster("cluster_2", Collections.emptyList(), randomBoolean() ? cluster2Proxy : null);
+                    assertFalse(service.isRemoteClusterRegistered("cluster_2"));
+                }
+            }
+        }
+    }
+
+    private void updateRemoteCluster(RemoteClusterService service, String clusterAlias, List<String> addresses, String proxyAddress)
+        throws Exception {
+        CountDownLatch latch = new CountDownLatch(1);
+        AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
+        ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> {
+            exceptionAtomicReference.set(x);
+            latch.countDown();
+        });
+        service.updateRemoteCluster(clusterAlias, addresses, proxyAddress, listener);
+        latch.await();
+        if (exceptionAtomicReference.get() != null) {
+            throw exceptionAtomicReference.get();
+        }
+    }
 }
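For reference, the settings shape this test drives, pulled straight out of the fixture above (the alias and addresses are the test's own, not defaults):

[source,java]
--------------------------------------------------
Settings remoteClusterSettings = Settings.builder()
    .putList("search.remote.cluster_1.seeds", "cluster_1_node0:8080")
    .put("search.remote.cluster_1.proxy", "1.1.1.1:99") // every connection for cluster_1 is dialled via this endpoint
    .build();
--------------------------------------------------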
@@ -78,7 +78,12 @@ addSubProjects('', new File(rootProject.projectDir, 'plugins'))
 addSubProjects('', new File(rootProject.projectDir, 'qa'))
 addSubProjects('', new File(rootProject.projectDir, 'x-pack'))
 
-boolean isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
+List startTasks = gradle.startParameter.taskNames
+boolean isEclipse =
+        System.getProperty("eclipse.launcher") != null ||   // Detects gradle launched from the Eclipse IDE
+        System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
+        startTasks.contains("eclipse") ||                    // Detects gradle launched from the command line to do Eclipse stuff
+        startTasks.contains("cleanEclipse");
 if (isEclipse) {
   // eclipse cannot handle an intermediate dependency between main and test, so we must create separate projects
   // for server-src and server-tests
@@ -728,7 +728,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
 
         @Override
         protected void performOnReplica(ResyncReplicationRequest request, IndexShard replica) throws Exception {
-            executeResyncOnReplica(replica, request);
+            executeResyncOnReplica(replica, request, getPrimaryShard().getPendingPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint());
         }
     }
@@ -741,8 +741,15 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
         return result;
     }
 
-    private void executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request) throws Exception {
-        final Translog.Location location = TransportResyncReplicationAction.performOnReplica(request, replica);
+    private void executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request,
+                                        long operationPrimaryTerm, long globalCheckpointOnPrimary) throws Exception {
+        final Translog.Location location;
+        final PlainActionFuture<Releasable> acquirePermitFuture = new PlainActionFuture<>();
+        replica.acquireReplicaOperationPermit(
+            operationPrimaryTerm, globalCheckpointOnPrimary, acquirePermitFuture, ThreadPool.Names.SAME, request);
+        try (Releasable ignored = acquirePermitFuture.actionGet()) {
+            location = TransportResyncReplicationAction.performOnReplica(request, replica);
+        }
         TransportWriteActionTestHelper.performPostWriteActions(replica, request, location, logger);
     }
 }
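The change above routes the resync write through a replica operation permit before applying it, so the test harness now exercises the same primary-term and global-checkpoint checks as real replication. A minimal sketch of that acquire-then-apply pattern, condensed from the code above (assumes the same shard fixtures and helpers):

[source,java]
--------------------------------------------------
final PlainActionFuture<Releasable> permit = new PlainActionFuture<>();
replica.acquireReplicaOperationPermit(
    operationPrimaryTerm, globalCheckpointOnPrimary, permit, ThreadPool.Names.SAME, request);
try (Releasable ignored = permit.actionGet()) {
    // the write is only applied while the permit is held
    TransportResyncReplicationAction.performOnReplica(request, replica);
}
--------------------------------------------------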
@@ -2349,6 +2349,9 @@ public abstract class ESIntegTestCase extends ESTestCase {
                     final ObjectLongMap<String> globalCheckpoints = indexShard.getInSyncGlobalCheckpoints();
                     for (ShardStats shardStats : indexShardStats) {
                         final SeqNoStats seqNoStats = shardStats.getSeqNoStats();
+                        if (seqNoStats == null) {
+                            continue; // this shard was closed
+                        }
                         assertThat(shardStats.getShardRouting() + " local checkpoint mismatch",
                             seqNoStats.getLocalCheckpoint(), equalTo(primarySeqNoStats.getLocalCheckpoint()));
                         assertThat(shardStats.getShardRouting() + " global checkpoint mismatch",
@@ -95,6 +95,12 @@ public final class MockTransportService extends TransportService {
 
     public static MockTransportService createNewService(Settings settings, Version version, ThreadPool threadPool,
                                                          @Nullable ClusterSettings clusterSettings) {
+        MockTcpTransport mockTcpTransport = newMockTransport(settings, version, threadPool);
+        return createNewService(settings, mockTcpTransport, version, threadPool, clusterSettings,
+            Collections.emptySet());
+    }
+
+    public static MockTcpTransport newMockTransport(Settings settings, Version version, ThreadPool threadPool) {
         // some tests use MockTransportService to do network based testing. Yet, we run tests in multiple JVMs that means
         // concurrent tests could claim port that another JVM just released and if that test tries to simulate a disconnect it might
         // be smart enough to re-connect depending on what is tested. To reduce the risk, since this is very hard to debug we use
@@ -102,9 +108,8 @@ public final class MockTransportService extends TransportService {
         int basePort = 10300 + (JVM_ORDINAL * 100); // use a non-default port otherwise some cluster in this JVM might reuse a port
         settings = Settings.builder().put(TcpTransport.PORT.getKey(), basePort + "-" + (basePort + 100)).put(settings).build();
         NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
-        final Transport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE,
+        return new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE,
             new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList()), version);
-        return createNewService(settings, transport, version, threadPool, clusterSettings, Collections.emptySet());
     }
 
     public static MockTransportService createNewService(Settings settings, Transport transport, Version version, ThreadPool threadPool,
@@ -41,7 +41,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
 
-public class StubbableTransport implements Transport {
+public final class StubbableTransport implements Transport {
 
     private final ConcurrentHashMap<TransportAddress, SendRequestBehavior> sendBehaviors = new ConcurrentHashMap<>();
     private final ConcurrentHashMap<TransportAddress, OpenConnectionBehavior> connectBehaviors = new ConcurrentHashMap<>();
@@ -60,6 +60,12 @@ public class StubbableTransport implements Transport {
         return prior == null;
     }
 
+    public boolean setDefaultConnectBehavior(OpenConnectionBehavior openConnectionBehavior) {
+        OpenConnectionBehavior prior = this.defaultConnectBehavior;
+        this.defaultConnectBehavior = openConnectionBehavior;
+        return prior == null;
+    }
+
     boolean addSendBehavior(TransportAddress transportAddress, SendRequestBehavior sendBehavior) {
         return sendBehaviors.put(transportAddress, sendBehavior) == null;
     }
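A minimal usage sketch for the new hook (assumes the test transport classes above); `getProxyTransport` earlier in this diff installs a behavior of exactly this shape to rewrite the target node before dialling, whereas the delegating behavior here is a no-op placeholder:

[source,java]
--------------------------------------------------
StubbableTransport stubbable =
    new StubbableTransport(MockTransportService.newMockTransport(Settings.EMPTY, Version.CURRENT, threadPool));
// Fallback applied whenever no per-address behavior was registered; this one simply delegates.
stubbable.setDefaultConnectBehavior((wrapped, node, profile) -> wrapped.openConnection(node, profile));
--------------------------------------------------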
@@ -55,8 +55,8 @@ help you get up and running. The +elasticsearch-setup-passwords+ command is the
 simplest method to set the built-in users' passwords for the first time.
 
 For example, you can run the command in an "interactive" mode, which prompts you
-to enter new passwords for the `elastic`, `kibana`, `beats_system`, and
-`logstash_system` users:
+to enter new passwords for the `elastic`, `kibana`, `beats_system`,
+`logstash_system`, and `apm_system` users:
 
 [source,shell]
 --------------------------------------------------
@@ -20,16 +20,11 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
-import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
-import org.elasticsearch.xpack.core.rollup.RollupField;
 import org.joda.time.DateTimeZone;
 
 import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -182,19 +177,6 @@ public class DateHistogramGroupConfig implements Writeable, ToXContentObject {
         return createRounding(interval.toString(), timeZone);
     }
 
-    /**
-     * This returns a set of aggregation builders which represent the configured
-     * set of date histograms. Used by the rollup indexer to iterate over historical data
-     */
-    public List<CompositeValuesSourceBuilder<?>> toBuilders() {
-        DateHistogramValuesSourceBuilder vsBuilder =
-            new DateHistogramValuesSourceBuilder(RollupField.formatIndexerAggName(field, DateHistogramAggregationBuilder.NAME));
-        vsBuilder.dateHistogramInterval(interval);
-        vsBuilder.field(field);
-        vsBuilder.timeZone(toDateTimeZone(timeZone));
-        return Collections.singletonList(vsBuilder);
-    }
-
     public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
                                  ActionRequestValidationException validationException) {
@@ -16,18 +16,13 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
-import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder;
-import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
 import org.elasticsearch.xpack.core.rollup.RollupField;
 
 import java.io.IOException;
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.stream.Collectors;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
@@ -85,25 +80,6 @@ public class HistogramGroupConfig implements Writeable, ToXContentObject {
         return fields;
     }
 
-    /**
-     * This returns a set of aggregation builders which represent the configured
-     * set of histograms. Used by the rollup indexer to iterate over historical data
-     */
-    public List<CompositeValuesSourceBuilder<?>> toBuilders() {
-        if (fields.length == 0) {
-            return Collections.emptyList();
-        }
-
-        return Arrays.stream(fields).map(f -> {
-            HistogramValuesSourceBuilder vsBuilder
-                = new HistogramValuesSourceBuilder(RollupField.formatIndexerAggName(f, HistogramAggregationBuilder.NAME));
-            vsBuilder.interval(interval);
-            vsBuilder.field(f);
-            vsBuilder.missingBucket(true);
-            return vsBuilder;
-        }).collect(Collectors.toList());
-    }
-
     public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
                                  ActionRequestValidationException validationException) {
@@ -16,18 +16,9 @@ import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
-import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
-import org.elasticsearch.search.aggregations.support.ValueType;
-import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
 import org.elasticsearch.xpack.core.rollup.RollupField;
 
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -53,11 +44,11 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
 public class MetricConfig implements Writeable, ToXContentObject {
 
     // TODO: replace these with an enum
-    private static final ParseField MIN = new ParseField("min");
-    private static final ParseField MAX = new ParseField("max");
-    private static final ParseField SUM = new ParseField("sum");
-    private static final ParseField AVG = new ParseField("avg");
-    private static final ParseField VALUE_COUNT = new ParseField("value_count");
+    public static final ParseField MIN = new ParseField("min");
+    public static final ParseField MAX = new ParseField("max");
+    public static final ParseField SUM = new ParseField("sum");
+    public static final ParseField AVG = new ParseField("avg");
+    public static final ParseField VALUE_COUNT = new ParseField("value_count");
 
     static final String NAME = "metrics";
     private static final String FIELD = "field";
@@ -111,46 +102,6 @@ public class MetricConfig implements Writeable, ToXContentObject {
         return metrics;
     }
 
-    /**
-     * This returns a set of aggregation builders which represent the configured
-     * set of metrics. Used by the rollup indexer to iterate over historical data
-     */
-    public List<ValuesSourceAggregationBuilder.LeafOnly> toBuilders() {
-        if (metrics.size() == 0) {
-            return Collections.emptyList();
-        }
-
-        List<ValuesSourceAggregationBuilder.LeafOnly> aggs = new ArrayList<>(metrics.size());
-        for (String metric : metrics) {
-            ValuesSourceAggregationBuilder.LeafOnly newBuilder;
-            if (metric.equals(MIN.getPreferredName())) {
-                newBuilder = new MinAggregationBuilder(RollupField.formatFieldName(field, MinAggregationBuilder.NAME, RollupField.VALUE));
-            } else if (metric.equals(MAX.getPreferredName())) {
-                newBuilder = new MaxAggregationBuilder(RollupField.formatFieldName(field, MaxAggregationBuilder.NAME, RollupField.VALUE));
-            } else if (metric.equals(AVG.getPreferredName())) {
-                // Avgs are sum + count
-                newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.VALUE));
-                ValuesSourceAggregationBuilder.LeafOnly countBuilder
-                    = new ValueCountAggregationBuilder(
-                        RollupField.formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD), ValueType.NUMERIC);
-                countBuilder.field(field);
-                aggs.add(countBuilder);
-            } else if (metric.equals(SUM.getPreferredName())) {
-                newBuilder = new SumAggregationBuilder(RollupField.formatFieldName(field, SumAggregationBuilder.NAME, RollupField.VALUE));
-            } else if (metric.equals(VALUE_COUNT.getPreferredName())) {
-                // TODO allow non-numeric value_counts.
-                // Hardcoding this is fine for now since the job validation guarantees that all metric fields are numerics
-                newBuilder = new ValueCountAggregationBuilder(
-                    RollupField.formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE), ValueType.NUMERIC);
-            } else {
-                throw new IllegalArgumentException("Unsupported metric type [" + metric + "]");
-            }
-            newBuilder.field(field);
-            aggs.add(newBuilder);
-        }
-        return aggs;
-    }
-
     public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
                                  ActionRequestValidationException validationException) {
@@ -18,16 +18,11 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.mapper.KeywordFieldMapper;
 import org.elasticsearch.index.mapper.TextFieldMapper;
-import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
-import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
-import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.elasticsearch.xpack.core.rollup.RollupField;
 
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
-import java.util.stream.Collectors;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
@@ -79,20 +74,6 @@ public class TermsGroupConfig implements Writeable, ToXContentObject {
         return fields;
     }
 
-    /**
-     * This returns a set of aggregation builders which represent the configured
-     * set of date histograms. Used by the rollup indexer to iterate over historical data
-     */
-    public List<CompositeValuesSourceBuilder<?>> toBuilders() {
-        return Arrays.stream(fields).map(f -> {
-            TermsValuesSourceBuilder vsBuilder
-                = new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(f, TermsAggregationBuilder.NAME));
-            vsBuilder.field(f);
-            vsBuilder.missingBucket(true);
-            return vsBuilder;
-        }).collect(Collectors.toList());
-    }
-
     public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
                                  ActionRequestValidationException validationException) {
@@ -19,6 +19,7 @@ public class ClientReservedRealm {
             case UsernamesField.KIBANA_NAME:
             case UsernamesField.LOGSTASH_NAME:
             case UsernamesField.BEATS_NAME:
+            case UsernamesField.APM_NAME:
                 return XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings);
             default:
                 return AnonymousUser.isAnonymousUsername(username, settings);
@@ -112,6 +112,8 @@ public class ReservedRolesStore {
                         null, MetadataUtils.DEFAULT_RESERVED_METADATA))
                 .put(UsernamesField.BEATS_ROLE, new RoleDescriptor(UsernamesField.BEATS_ROLE,
                         new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA))
+                .put(UsernamesField.APM_ROLE, new RoleDescriptor(UsernamesField.APM_ROLE,
+                        new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA))
                 .put("machine_learning_user", new RoleDescriptor("machine_learning_user", new String[] { "monitor_ml" },
                         new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".ml-anomalies*",
                                 ".ml-notifications").privileges("view_index_metadata", "read").build() },
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.security.user;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.protocol.xpack.security.User;
+import org.elasticsearch.xpack.core.security.support.MetadataUtils;
+
+/**
+ * Built in user for APM server internals. Currently used for APM server monitoring.
+ */
+public class APMSystemUser extends User {
+
+    public static final String NAME = UsernamesField.APM_NAME;
+    public static final String ROLE_NAME = UsernamesField.APM_ROLE;
+    public static final Version DEFINED_SINCE = Version.V_6_5_0;
+    public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE);
+
+    public APMSystemUser(boolean enabled) {
+        super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled);
+    }
+}
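A small, test-style sketch of what the new built-in user looks like at runtime (illustrative only; it assumes the `principal()`/`roles()` accessors the x-pack `User` class exposes elsewhere in the codebase):

[source,java]
--------------------------------------------------
APMSystemUser apm = new APMSystemUser(true);
// Principal and role are both "apm_system"; the reserved metadata marks it as a built-in user.
assert APMSystemUser.NAME.equals(apm.principal());
assert apm.roles().length == 1 && APMSystemUser.ROLE_NAME.equals(apm.roles()[0]);
--------------------------------------------------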
@@ -20,6 +20,8 @@ public final class UsernamesField {
     public static final String LOGSTASH_ROLE = "logstash_system";
     public static final String BEATS_NAME = "beats_system";
     public static final String BEATS_ROLE = "beats_system";
+    public static final String APM_NAME = "apm_system";
+    public static final String APM_ROLE = "apm_system";
 
     private UsernamesField() {}
 }
@@ -9,19 +9,16 @@ import org.elasticsearch.action.ActionRequestValidationException;
 import org.elasticsearch.action.fieldcaps.FieldCapabilities;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
 import org.elasticsearch.test.AbstractSerializingTestCase;
 
 import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 
 import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomTermsGroupConfig;
 import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCase<TermsGroupConfig> {
@ -77,62 +74,4 @@ public class TermsGroupConfigSerializingTests extends AbstractSerializingTestCas
|
||||||
assertThat(e.validationErrors().get(0), equalTo("The field referenced by a terms group must be a [numeric] or " +
|
assertThat(e.validationErrors().get(0), equalTo("The field referenced by a terms group must be a [numeric] or " +
|
||||||
"[keyword/text] type, but found [geo_point] for field [my_field]"));
|
"[keyword/text] type, but found [geo_point] for field [my_field]"));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testValidateFieldMatchingNotAggregatable() {
|
|
||||||
ActionRequestValidationException e = new ActionRequestValidationException();
|
|
||||||
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
|
|
||||||
|
|
||||||
// Have to mock fieldcaps because the ctor's aren't public...
|
|
||||||
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
|
|
||||||
when(fieldCaps.isAggregatable()).thenReturn(false);
|
|
||||||
responseMap.put("my_field", Collections.singletonMap(getRandomType(), fieldCaps));
|
|
||||||
|
|
||||||
TermsGroupConfig config = new TermsGroupConfig("my_field");
|
|
||||||
config.validateMappings(responseMap, e);
|
|
||||||
assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
|
|
||||||
}
|
|
||||||
|
|
||||||
public void testValidateMatchingField() {
|
|
||||||
ActionRequestValidationException e = new ActionRequestValidationException();
|
|
||||||
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
|
|
||||||
String type = getRandomType();
|
|
||||||
|
|
||||||
// Have to mock fieldcaps because the ctor's aren't public...
|
|
||||||
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
|
|
||||||
when(fieldCaps.isAggregatable()).thenReturn(true);
|
|
||||||
responseMap.put("my_field", Collections.singletonMap(type, fieldCaps));
|
|
||||||
|
|
||||||
TermsGroupConfig config = new TermsGroupConfig("my_field");
|
|
||||||
config.validateMappings(responseMap, e);
|
|
||||||
if (e.validationErrors().size() != 0) {
|
|
||||||
fail(e.getMessage());
|
|
||||||
}
|
|
||||||
|
|
||||||
List<CompositeValuesSourceBuilder<?>> builders = config.toBuilders();
|
|
||||||
assertThat(builders.size(), equalTo(1));
|
|
||||||
}
|
|
||||||
|
|
||||||
private String getRandomType() {
|
|
||||||
int n = randomIntBetween(0,8);
|
|
||||||
if (n == 0) {
|
|
||||||
return "keyword";
|
|
||||||
} else if (n == 1) {
|
|
||||||
return "text";
|
|
||||||
} else if (n == 2) {
|
|
||||||
return "long";
|
|
||||||
} else if (n == 3) {
|
|
||||||
return "integer";
|
|
||||||
} else if (n == 4) {
|
|
||||||
return "short";
|
|
||||||
} else if (n == 5) {
|
|
||||||
return "float";
|
|
||||||
} else if (n == 6) {
|
|
||||||
return "double";
|
|
||||||
} else if (n == 7) {
|
|
||||||
return "scaled_float";
|
|
||||||
} else if (n == 8) {
|
|
||||||
return "half_float";
|
|
||||||
}
|
|
||||||
return "long";
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
|
@@ -94,6 +94,7 @@ import org.elasticsearch.xpack.core.security.authz.permission.FieldPermissionsCache;
 import org.elasticsearch.xpack.core.security.authz.permission.Role;
 import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilege;
 import org.elasticsearch.xpack.core.security.authz.privilege.ApplicationPrivilegeDescriptor;
+import org.elasticsearch.xpack.core.security.user.APMSystemUser;
 import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
 import org.elasticsearch.xpack.core.security.user.LogstashSystemUser;
 import org.elasticsearch.xpack.core.security.user.SystemUser;
@@ -147,6 +148,7 @@ public class ReservedRolesStoreTests extends ESTestCase {
         assertThat(ReservedRolesStore.isReserved(XPackUser.ROLE_NAME), is(true));
         assertThat(ReservedRolesStore.isReserved(LogstashSystemUser.ROLE_NAME), is(true));
         assertThat(ReservedRolesStore.isReserved(BeatsSystemUser.ROLE_NAME), is(true));
+        assertThat(ReservedRolesStore.isReserved(APMSystemUser.ROLE_NAME), is(true));
     }
 
     public void testIngestAdminRole() {
@@ -628,6 +630,30 @@ public class ReservedRolesStoreTests extends ESTestCase {
                 is(false));
     }
 
+    public void testAPMSystemRole() {
+        final TransportRequest request = mock(TransportRequest.class);
+
+        RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor(APMSystemUser.ROLE_NAME);
+        assertNotNull(roleDescriptor);
+        assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true));
+
+        Role APMSystemRole = Role.builder(roleDescriptor, null).build();
+        assertThat(APMSystemRole.cluster().check(ClusterHealthAction.NAME, request), is(true));
+        assertThat(APMSystemRole.cluster().check(ClusterStateAction.NAME, request), is(true));
+        assertThat(APMSystemRole.cluster().check(ClusterStatsAction.NAME, request), is(true));
+        assertThat(APMSystemRole.cluster().check(PutIndexTemplateAction.NAME, request), is(false));
+        assertThat(APMSystemRole.cluster().check(ClusterRerouteAction.NAME, request), is(false));
+        assertThat(APMSystemRole.cluster().check(ClusterUpdateSettingsAction.NAME, request), is(false));
+        assertThat(APMSystemRole.cluster().check(MonitoringBulkAction.NAME, request), is(true));
+
+        assertThat(APMSystemRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false));
+
+        assertThat(APMSystemRole.indices().allowedIndicesMatcher(IndexAction.NAME).test("foo"), is(false));
+        assertThat(APMSystemRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(".reporting"), is(false));
+        assertThat(APMSystemRole.indices().allowedIndicesMatcher("indices:foo").test(randomAlphaOfLengthBetween(8, 24)),
+                is(false));
+    }
+
     public void testMachineLearningAdminRole() {
         final TransportRequest request = mock(TransportRequest.class);
@@ -5,9 +5,9 @@
  */
 package org.elasticsearch.xpack.ml.integration;
 
-import org.apache.http.entity.ContentType;
-import org.apache.http.entity.StringEntity;
-import org.apache.http.message.BasicHeader;
+import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.RequestOptions;
 import org.elasticsearch.client.Response;
 import org.elasticsearch.client.ResponseException;
 import org.elasticsearch.client.RestClient;
@@ -22,10 +22,7 @@ import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;
 import org.junit.After;
 import org.junit.Before;
 
-import java.io.BufferedReader;
 import java.io.IOException;
-import java.io.InputStreamReader;
-import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
@@ -36,6 +33,7 @@ import java.util.stream.Collectors;
 import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.not;
 
 public class DatafeedJobsRestIT extends ESRestTestCase {
 
@@ -57,26 +55,24 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
     }
 
     private void setupDataAccessRole(String index) throws IOException {
-        String json = "{"
+        Request request = new Request("PUT", "/_xpack/security/role/test_data_access");
+        request.setJsonEntity("{"
                 + "  \"indices\" : ["
                 + "    { \"names\": [\"" + index + "\"], \"privileges\": [\"read\"] }"
                 + "  ]"
-                + "}";
-        client().performRequest("put", "_xpack/security/role/test_data_access", Collections.emptyMap(),
-                new StringEntity(json, ContentType.APPLICATION_JSON));
+                + "}");
+        client().performRequest(request);
     }
 
     private void setupUser(String user, List<String> roles) throws IOException {
         String password = new String(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING.getChars());
 
-        String json = "{"
+        Request request = new Request("PUT", "/_xpack/security/user/" + user);
+        request.setJsonEntity("{"
                 + "  \"password\" : \"" + password + "\","
                 + "  \"roles\" : [ " + roles.stream().map(unquoted -> "\"" + unquoted + "\"").collect(Collectors.joining(", ")) + " ]"
-                + "}";
-        client().performRequest("put", "_xpack/security/user/" + user, Collections.emptyMap(),
-                new StringEntity(json, ContentType.APPLICATION_JSON));
+                + "}");
+        client().performRequest(request);
     }
 
     @Before
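The two helpers above show the core pattern of this change: the deprecated performRequest(method, endpoint, params, entity) overloads of the low-level REST client are replaced with Request objects. A condensed before/after sketch, using a placeholder endpoint and body that are not taken from the commit:

    // Old style: method, endpoint, params and a hand-built StringEntity
    // client().performRequest("put", "/some-index/_doc/1", Collections.emptyMap(),
    //         new StringEntity("{\"field\":1}", ContentType.APPLICATION_JSON));

    // New style: build a Request, attach the JSON body, then execute it
    Request request = new Request("PUT", "/some-index/_doc/1");   // placeholder path
    request.setJsonEntity("{\"field\":1}");                       // placeholder body
    Response response = client().performRequest(request);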
@@ -92,7 +88,10 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
     }
 
     private void addAirlineData() throws IOException {
-        String mappings = "{"
+        StringBuilder bulk = new StringBuilder();
+
+        Request createEmptyAirlineDataRequest = new Request("PUT", "/airline-data-empty");
+        createEmptyAirlineDataRequest.setJsonEntity("{"
                 + "  \"mappings\": {"
                 + "    \"response\": {"
                 + "      \"properties\": {"
@@ -102,12 +101,12 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      }"
                 + "    }"
                 + "  }"
-                + "}";
-        client().performRequest("put", "airline-data-empty", Collections.emptyMap(),
-                new StringEntity(mappings, ContentType.APPLICATION_JSON));
+                + "}");
+        client().performRequest(createEmptyAirlineDataRequest);
 
         // Create index with source = enabled, doc_values = enabled, stored = false + multi-field
-        mappings = "{"
+        Request createAirlineDataRequest = new Request("PUT", "/airline-data");
+        createAirlineDataRequest.setJsonEntity("{"
                 + "  \"mappings\": {"
                 + "    \"response\": {"
                 + "      \"properties\": {"
@@ -123,18 +122,17 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      }"
                 + "    }"
                 + "  }"
-                + "}";
-        client().performRequest("put", "airline-data", Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON));
+                + "}");
+        client().performRequest(createAirlineDataRequest);
 
-        client().performRequest("put", "airline-data/response/1", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data/response/2", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}",
-                        ContentType.APPLICATION_JSON));
+        bulk.append("{\"index\": {\"_index\": \"airline-data\", \"_type\": \"response\", \"_id\": 1}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data\", \"_type\": \"response\", \"_id\": 2}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}\n");
 
         // Create index with source = enabled, doc_values = disabled (except time), stored = false
-        mappings = "{"
+        Request createAirlineDataDisabledDocValues = new Request("PUT", "/airline-data-disabled-doc-values");
+        createAirlineDataDisabledDocValues.setJsonEntity("{"
                 + "  \"mappings\": {"
                 + "    \"response\": {"
                 + "      \"properties\": {"
@@ -144,19 +142,17 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      }"
                 + "    }"
                 + "  }"
-                + "}";
-        client().performRequest("put", "airline-data-disabled-doc-values", Collections.emptyMap(),
-                new StringEntity(mappings, ContentType.APPLICATION_JSON));
+                + "}");
+        client().performRequest(createAirlineDataDisabledDocValues);
 
-        client().performRequest("put", "airline-data-disabled-doc-values/response/1", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-disabled-doc-values/response/2", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}",
-                        ContentType.APPLICATION_JSON));
+        bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-doc-values\", \"_type\": \"response\", \"_id\": 1}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-doc-values\", \"_type\": \"response\", \"_id\": 2}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}\n");
 
         // Create index with source = disabled, doc_values = enabled (except time), stored = true
-        mappings = "{"
+        Request createAirlineDataDisabledSource = new Request("PUT", "/airline-data-disabled-source");
+        createAirlineDataDisabledSource.setJsonEntity("{"
                 + "  \"mappings\": {"
                 + "    \"response\": {"
                 + "      \"_source\":{\"enabled\":false},"
@@ -167,19 +163,16 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      }"
                 + "    }"
                 + "  }"
-                + "}";
-        client().performRequest("put", "airline-data-disabled-source", Collections.emptyMap(),
-                new StringEntity(mappings, ContentType.APPLICATION_JSON));
+                + "}");
 
-        client().performRequest("put", "airline-data-disabled-source/response/1", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-disabled-source/response/2", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}",
-                        ContentType.APPLICATION_JSON));
+        bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-source\", \"_type\": \"response\", \"_id\": 1}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-source\", \"_type\": \"response\", \"_id\": 2}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}\n");
 
         // Create index with nested documents
-        mappings = "{"
+        Request createAirlineDataNested = new Request("PUT", "/nested-data");
+        createAirlineDataNested.setJsonEntity("{"
                 + "  \"mappings\": {"
                 + "    \"response\": {"
                 + "      \"properties\": {"
@@ -187,18 +180,17 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      }"
                 + "    }"
                 + "  }"
-                + "}";
-        client().performRequest("put", "nested-data", Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON));
+                + "}");
+        client().performRequest(createAirlineDataNested);
 
-        client().performRequest("put", "nested-data/response/1", Collections.emptyMap(),
-                new StringEntity("{\"time\":\"2016-06-01T00:00:00Z\", \"responsetime\":{\"millis\":135.22}}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "nested-data/response/2", Collections.emptyMap(),
-                new StringEntity("{\"time\":\"2016-06-01T01:59:00Z\",\"responsetime\":{\"millis\":222.0}}",
-                        ContentType.APPLICATION_JSON));
+        bulk.append("{\"index\": {\"_index\": \"nested-data\", \"_type\": \"response\", \"_id\": 1}}\n");
+        bulk.append("{\"time\":\"2016-06-01T00:00:00Z\", \"responsetime\":{\"millis\":135.22}}\n");
+        bulk.append("{\"index\": {\"_index\": \"nested-data\", \"_type\": \"response\", \"_id\": 2}}\n");
+        bulk.append("{\"time\":\"2016-06-01T01:59:00Z\",\"responsetime\":{\"millis\":222.0}}\n");
 
         // Create index with multiple docs per time interval for aggregation testing
-        mappings = "{"
+        Request createAirlineDataAggs = new Request("PUT", "/airline-data-aggs");
+        createAirlineDataAggs.setJsonEntity("{"
                 + "  \"mappings\": {"
                 + "    \"response\": {"
                 + "      \"properties\": {"
@@ -208,43 +200,33 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      }"
                 + "    }"
                 + "  }"
-                + "}";
-        client().performRequest("put", "airline-data-aggs", Collections.emptyMap(),
-                new StringEntity(mappings, ContentType.APPLICATION_JSON));
+                + "}");
+        client().performRequest(createAirlineDataAggs);
 
-        client().performRequest("put", "airline-data-aggs/response/1", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":100.0}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-aggs/response/2", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"AAA\",\"responsetime\":200.0}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-aggs/response/3", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"BBB\",\"responsetime\":1000.0}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-aggs/response/4", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"BBB\",\"responsetime\":2000.0}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-aggs/response/5", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"AAA\",\"responsetime\":300.0}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-aggs/response/6", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"AAA\",\"responsetime\":400.0}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-aggs/response/7", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"BBB\",\"responsetime\":3000.0}",
-                        ContentType.APPLICATION_JSON));
-        client().performRequest("put", "airline-data-aggs/response/8", Collections.emptyMap(),
-                new StringEntity("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"BBB\",\"responsetime\":4000.0}",
-                        ContentType.APPLICATION_JSON));
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 1}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":100.0}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 2}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"AAA\",\"responsetime\":200.0}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 3}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"BBB\",\"responsetime\":1000.0}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 4}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"BBB\",\"responsetime\":2000.0}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 5}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"AAA\",\"responsetime\":300.0}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 6}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"AAA\",\"responsetime\":400.0}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 7}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"BBB\",\"responsetime\":3000.0}\n");
+        bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 8}}\n");
+        bulk.append("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"BBB\",\"responsetime\":4000.0}\n");
 
-        // Ensure all data is searchable
-        client().performRequest("post", "_refresh");
+        bulkIndex(bulk.toString());
     }
 
     private void addNetworkData(String index) throws IOException {
 
         // Create index with source = enabled, doc_values = enabled, stored = false + multi-field
-        String mappings = "{"
+        Request createIndexRequest = new Request("PUT", index);
+        createIndexRequest.setJsonEntity("{"
                 + "  \"mappings\": {"
                 + "    \"doc\": {"
                 + "      \"properties\": {"
@@ -260,27 +242,25 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
                 + "      }"
                 + "    }"
                 + "  }"
-                + "}";
-        client().performRequest("put", index, Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON));
+                + "}");;
+        client().performRequest(createIndexRequest);
 
+        StringBuilder bulk = new StringBuilder();
         String docTemplate = "{\"timestamp\":%d,\"host\":\"%s\",\"network_bytes_out\":%d}";
         Date date = new Date(1464739200735L);
         for (int i = 0; i < 120; i++) {
             long byteCount = randomNonNegativeLong();
-            String jsonDoc = String.format(Locale.ROOT, docTemplate, date.getTime(), "hostA", byteCount);
-            client().performRequest("post", index + "/doc", Collections.emptyMap(),
-                    new StringEntity(jsonDoc, ContentType.APPLICATION_JSON));
+            bulk.append("{\"index\": {\"_index\": \"").append(index).append("\", \"_type\": \"doc\"}}\n");
+            bulk.append(String.format(Locale.ROOT, docTemplate, date.getTime(), "hostA", byteCount)).append('\n');
 
             byteCount = randomNonNegativeLong();
-            jsonDoc = String.format(Locale.ROOT, docTemplate, date.getTime(), "hostB", byteCount);
-            client().performRequest("post", index + "/doc", Collections.emptyMap(),
-                    new StringEntity(jsonDoc, ContentType.APPLICATION_JSON));
+            bulk.append("{\"index\": {\"_index\": \"").append(index).append("\", \"_type\": \"doc\"}}\n");
+            bulk.append(String.format(Locale.ROOT, docTemplate, date.getTime(), "hostB", byteCount)).append('\n');
 
             date = new Date(date.getTime() + 10_000);
         }
 
-        // Ensure all data is searchable
-        client().performRequest("post", "_refresh");
+        bulkIndex(bulk.toString());
    }
 
     public void testLookbackOnlyWithMixedTypes() throws Exception {
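The per-document indexing calls above are collapsed into a single NDJSON payload that is handed to a bulkIndex(String) helper defined elsewhere in this test class and not visible in this diff. A rough sketch of what such a helper would look like with the same Request API; the refresh parameter and the errors assertion are assumptions standing in for the removed explicit _refresh call:

    // Hypothetical helper, not part of this diff
    private void bulkIndex(String bulk) throws IOException {
        Request bulkRequest = new Request("POST", "/_bulk");
        bulkRequest.setJsonEntity(bulk);                 // NDJSON: action line + source line pairs
        bulkRequest.addParameter("refresh", "true");     // assumed: makes the data searchable immediately
        Response response = client().performRequest(bulkRequest);
        assertThat(EntityUtils.toString(response.getEntity()), containsString("\"errors\":false"));
    }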
@@ -314,11 +294,21 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
     public void testLookbackOnlyWithNestedFields() throws Exception {
         String jobId = "test-lookback-only-with-nested-fields";
-        String job = "{\"description\":\"Nested job\", \"analysis_config\" : {\"bucket_span\":\"1h\",\"detectors\" :"
-                + "[{\"function\":\"mean\",\"field_name\":\"responsetime.millis\"}]}, \"data_description\" : {\"time_field\":\"time\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
-                new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"description\": \"Nested job\",\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"1h\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"responsetime.millis\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },"
+                + "  \"data_description\": {\"time_field\": \"time\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = jobId + "-datafeed";
         new DatafeedBuilder(datafeedId, jobId, "nested-data", "response").build();
@@ -326,8 +316,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
         startDatafeedAndWaitUntilStopped(datafeedId);
         waitUntilJobIsClosed(jobId);
-        Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-        String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
+        Response jobStatsResponse = client().performRequest(
+                new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+        String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
         assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":2"));
         assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0"));
@@ -340,14 +331,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
     public void testInsufficientSearchPrivilegesOnPut() throws Exception {
         String jobId = "privs-put-job";
-        String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"1h\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\","
-                + "\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
-                + "\"data_description\" : {\"time_field\":\"time stamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
-                Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"description\": \"Aggs job\",\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"1h\",\n "
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"responsetime\",\n"
+                + "        \"by_field_name\":\"airline\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\" : {\"time_field\": \"time stamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         // This should be disallowed, because even though the ml_admin user has permission to
@@ -365,14 +365,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
     public void testInsufficientSearchPrivilegesOnPreview() throws Exception {
         String jobId = "privs-preview-job";
-        String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"1h\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\","
-                + "\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
-                + "\"data_description\" : {\"time_field\":\"time stamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
-                Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"description\": \"Aggs job\",\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"1h\",\n"
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"responsetime\",\n"
+                + "        \"by_field_name\": \"airline\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\" : {\"time_field\": \"time stamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").build();
@@ -380,10 +389,11 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         // This should be disallowed, because ml_admin is trying to preview a datafeed created by
         // by another user (x_pack_rest_user in this case) that will reveal the content of an index they
         // don't have permission to search directly
-        ResponseException e = expectThrows(ResponseException.class, () ->
-                client().performRequest("get",
-                        MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_preview",
-                        new BasicHeader("Authorization", BASIC_AUTH_VALUE_ML_ADMIN)));
+        Request getFeed = new Request("GET", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_preview");
+        RequestOptions.Builder options = getFeed.getOptions().toBuilder();
+        options.addHeader("Authorization", BASIC_AUTH_VALUE_ML_ADMIN);
+        getFeed.setOptions(options);
+        ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(getFeed));
 
         assertThat(e.getMessage(),
                 containsString("[indices:data/read/field_caps] is unauthorized for user [ml_admin]"));
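Per-request headers move from BasicHeader varargs to RequestOptions, as the hunk above shows. The same pattern, condensed; the endpoint is a placeholder, while the header name and value are the ones used in this test:

    Request request = new Request("GET", "/_xpack/ml/datafeeds/some-datafeed/_preview");  // placeholder path
    RequestOptions.Builder options = request.getOptions().toBuilder();
    options.addHeader("Authorization", BASIC_AUTH_VALUE_ML_ADMIN);
    request.setOptions(options);
    client().performRequest(request);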
@@ -391,13 +401,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
     public void testLookbackOnlyGivenAggregationsWithHistogram() throws Exception {
         String jobId = "aggs-histogram-job";
-        String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"1h\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
-                + "\"data_description\" : {\"time_field\":\"time stamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
-                new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"description\": \"Aggs job\",\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"1h\",\n"
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"responsetime\",\n"
+                + "        \"by_field_name\": \"airline\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\": {\"time_field\": \"time stamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         String aggregations = "{\"buckets\":{\"histogram\":{\"field\":\"time stamp\",\"interval\":3600000},"
@@ -410,8 +430,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
         startDatafeedAndWaitUntilStopped(datafeedId);
         waitUntilJobIsClosed(jobId);
-        Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-        String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
+        Response jobStatsResponse = client().performRequest(new Request("GET",
+                MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+        String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
         assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":4"));
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":4"));
         assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0"));
@@ -419,13 +440,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
     public void testLookbackOnlyGivenAggregationsWithDateHistogram() throws Exception {
         String jobId = "aggs-date-histogram-job";
-        String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"3600s\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
-                + "\"data_description\" : {\"time_field\":\"time stamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
-                new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"description\": \"Aggs job\",\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"3600s\",\n"
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"responsetime\",\n"
+                + "        \"by_field_name\": \"airline\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\": {\"time_field\": \"time stamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         String aggregations = "{\"time stamp\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"1h\"},"
@@ -438,8 +469,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
         startDatafeedAndWaitUntilStopped(datafeedId);
         waitUntilJobIsClosed(jobId);
-        Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-        String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
+        Response jobStatsResponse = client().performRequest(new Request("GET",
+                MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+        String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
         assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":4"));
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":4"));
         assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0"));
@@ -447,13 +479,22 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
     public void testLookbackUsingDerivativeAggWithLargerHistogramBucketThanDataRate() throws Exception {
         String jobId = "derivative-agg-network-job";
-        String job = "{\"analysis_config\" :{\"bucket_span\":\"300s\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"bytes-delta\",\"by_field_name\":\"hostname\"}]},"
-                + "\"data_description\" : {\"time_field\":\"timestamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
-                new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"300s\",\n"
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"bytes-delta\",\n"
+                + "        \"by_field_name\": \"hostname\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\": {\"time_field\": \"timestamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         String aggregations =
@@ -471,8 +512,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
         startDatafeedAndWaitUntilStopped(datafeedId);
         waitUntilJobIsClosed(jobId);
-        Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-        String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
+        Response jobStatsResponse = client().performRequest(new Request("GET",
+                MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+        String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
         assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":40"));
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":40"));
         assertThat(jobStatsResponseAsString, containsString("\"out_of_order_timestamp_count\":0"));
@@ -483,13 +525,22 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
     public void testLookbackUsingDerivativeAggWithSmallerHistogramBucketThanDataRate() throws Exception {
         String jobId = "derivative-agg-network-job";
-        String job = "{\"analysis_config\" :{\"bucket_span\":\"300s\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"bytes-delta\",\"by_field_name\":\"hostname\"}]},"
-                + "\"data_description\" : {\"time_field\":\"timestamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
-                new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"300s\",\n"
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"bytes-delta\",\n"
+                + "        \"by_field_name\": \"hostname\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\": {\"time_field\": \"timestamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         String aggregations =
@@ -507,21 +558,31 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
         startDatafeedAndWaitUntilStopped(datafeedId);
         waitUntilJobIsClosed(jobId);
-        Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-        String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
+        Response jobStatsResponse = client().performRequest(new Request("GET",
+                MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+        String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
         assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":240"));
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":240"));
     }
 
     public void testLookbackWithoutPermissions() throws Exception {
         String jobId = "permission-test-network-job";
-        String job = "{\"analysis_config\" :{\"bucket_span\":\"300s\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"bytes-delta\",\"by_field_name\":\"hostname\"}]},"
-                + "\"data_description\" : {\"time_field\":\"timestamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
-                new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"300s\",\n"
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"bytes-delta\",\n"
+                + "        \"by_field_name\": \"hostname\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\": {\"time_field\": \"timestamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         String aggregations =
@@ -545,29 +606,39 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
         startDatafeedAndWaitUntilStopped(datafeedId, BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS);
         waitUntilJobIsClosed(jobId);
-        Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-        String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
+        Response jobStatsResponse = client().performRequest(new Request("GET",
+                MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+        String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
         // We expect that no data made it through to the job
         assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":0"));
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":0"));
 
         // There should be a notification saying that there was a problem extracting data
-        client().performRequest("post", "_refresh");
-        Response notificationsResponse = client().performRequest("get", AuditorField.NOTIFICATIONS_INDEX + "/_search?q=job_id:" + jobId);
-        String notificationsResponseAsString = responseEntityToString(notificationsResponse);
+        client().performRequest(new Request("POST", "/_refresh"));
+        Response notificationsResponse = client().performRequest(
+                new Request("GET", AuditorField.NOTIFICATIONS_INDEX + "/_search?q=job_id:" + jobId));
+        String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity());
         assertThat(notificationsResponseAsString, containsString("\"message\":\"Datafeed is encountering errors extracting data: " +
                 "action [indices:data/read/search] is unauthorized for user [ml_admin_plus_data]\""));
     }
 
     public void testLookbackWithPipelineBucketAgg() throws Exception {
         String jobId = "pipeline-bucket-agg-job";
-        String job = "{\"analysis_config\" :{\"bucket_span\":\"1h\","
-                + "\"summary_count_field_name\":\"doc_count\","
-                + "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"percentile95_airlines_count\"}]},"
-                + "\"data_description\" : {\"time_field\":\"time stamp\"}"
-                + "}";
-        client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
-                new StringEntity(job, ContentType.APPLICATION_JSON));
+        Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
+        createJobRequest.setJsonEntity("{\n"
+                + "  \"analysis_config\": {\n"
+                + "    \"bucket_span\": \"1h\",\n"
+                + "    \"summary_count_field_name\": \"doc_count\",\n"
+                + "    \"detectors\": [\n"
+                + "      {\n"
+                + "        \"function\": \"mean\",\n"
+                + "        \"field_name\": \"percentile95_airlines_count\"\n"
+                + "      }\n"
+                + "    ]\n"
+                + "  },\n"
+                + "  \"data_description\": {\"time_field\": \"time stamp\"}\n"
+                + "}");
+        client().performRequest(createJobRequest);
 
         String datafeedId = "datafeed-" + jobId;
         String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"15m\"},"
@@ -582,8 +653,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
 
         startDatafeedAndWaitUntilStopped(datafeedId);
         waitUntilJobIsClosed(jobId);
-        Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-        String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
+        Response jobStatsResponse = client().performRequest(new Request("GET",
+                MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+        String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
         assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
         assertThat(jobStatsResponseAsString, containsString("\"input_field_count\":4"));
         assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":2"));
@@ -599,15 +671,15 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").build();
         openJob(client(), jobId);
 
-        Response response = client().performRequest("post",
-                MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z");
-        assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
-        assertThat(responseEntityToString(response), equalTo("{\"started\":true}"));
+        Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
+        startRequest.addParameter("start", "2016-06-01T00:00:00Z");
+        Response response = client().performRequest(startRequest);
+        assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"started\":true}"));
         assertBusy(() -> {
             try {
-                Response getJobResponse = client().performRequest("get",
-                        MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
-                String responseAsString = responseEntityToString(getJobResponse);
+                Response getJobResponse = client().performRequest(new Request("GET",
+                        MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
+                String responseAsString = EntityUtils.toString(getJobResponse.getEntity());
                 assertThat(responseAsString, containsString("\"processed_record_count\":2"));
                 assertThat(responseAsString, containsString("\"state\":\"opened\""));
             } catch (Exception e1) {
@@ -619,9 +691,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         // test a model snapshot is present
         assertBusy(() -> {
             try {
-                Response getJobResponse = client().performRequest("get",
-                        MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/model_snapshots");
-                String responseAsString = responseEntityToString(getJobResponse);
+                Response getJobResponse = client().performRequest(new Request("GET",
+                        MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/model_snapshots"));
+                String responseAsString = EntityUtils.toString(getJobResponse.getEntity());
                 assertThat(responseAsString, containsString("\"count\":1"));
             } catch (Exception e1) {
                 throw new RuntimeException(e1);
@@ -629,25 +701,25 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
         });
 
         ResponseException e = expectThrows(ResponseException.class,
-                () -> client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
+                () -> client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)));
         response = e.getResponse();
         assertThat(response.getStatusLine().getStatusCode(), equalTo(409));
-        assertThat(responseEntityToString(response), containsString("Cannot delete job [" + jobId + "] because datafeed [" + datafeedId
-                + "] refers to it"));
+        assertThat(EntityUtils.toString(response.getEntity()),
+                containsString("Cannot delete job [" + jobId + "] because datafeed [" + datafeedId + "] refers to it"));
 
-        response = client().performRequest("post", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stop");
+        response = client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stop"));
         assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
-        assertThat(responseEntityToString(response), equalTo("{\"stopped\":true}"));
+        assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"stopped\":true}"));
 
-        client().performRequest("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close");
+        client().performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close"));
 
-        response = client().performRequest("delete", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
|
response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||||
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
|
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"acknowledged\":true}"));
|
||||||
|
|
||||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||||
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
|
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"acknowledged\":true}"));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testForceDeleteWhileDatafeedIsRunning() throws Exception {
|
public void testForceDeleteWhileDatafeedIsRunning() throws Exception {
|
||||||
|
@ -657,25 +729,26 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||||
new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").build();
|
new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").build();
|
||||||
openJob(client(), jobId);
|
openJob(client(), jobId);
|
||||||
|
|
||||||
Response response = client().performRequest("post",
|
Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
|
||||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z");
|
startRequest.addParameter("start", "2016-06-01T00:00:00Z");
|
||||||
|
Response response = client().performRequest(startRequest);
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||||
assertThat(responseEntityToString(response), equalTo("{\"started\":true}"));
|
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"started\":true}"));
|
||||||
|
|
||||||
ResponseException e = expectThrows(ResponseException.class,
|
ResponseException e = expectThrows(ResponseException.class,
|
||||||
() -> client().performRequest("delete", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId));
|
() -> client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId)));
|
||||||
response = e.getResponse();
|
response = e.getResponse();
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(409));
|
assertThat(response.getStatusLine().getStatusCode(), equalTo(409));
|
||||||
assertThat(responseEntityToString(response), containsString("Cannot delete datafeed [" + datafeedId
|
assertThat(EntityUtils.toString(response.getEntity()),
|
||||||
+ "] while its status is started"));
|
containsString("Cannot delete datafeed [" + datafeedId + "] while its status is started"));
|
||||||
|
|
||||||
response = client().performRequest("delete",
|
Request forceDeleteRequest = new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
|
||||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "?force=true");
|
forceDeleteRequest.addParameter("force", "true");
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
response = client().performRequest(forceDeleteRequest);
|
||||||
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
|
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"acknowledged\":true}"));
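The force delete above is where the commit swaps a hand-built query string ("?force=true") for Request.addParameter. A short sketch of that pattern, again assuming a low-level RestClient; the datafeed id and helper are placeholders:

    import java.io.IOException;
    import org.apache.http.util.EntityUtils;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    final class ForceDeleteSketch {
        // Hypothetical helper mirroring the test's force delete.
        static String forceDeleteDatafeed(RestClient client, String datafeedId) throws IOException {
            Request request = new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId);
            request.addParameter("force", "true");   // instead of appending "?force=true" to the endpoint string
            Response response = client.performRequest(request);
            return EntityUtils.toString(response.getEntity());   // expected to be {"acknowledged":true}
        }
    }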
|
||||||
|
|
||||||
expectThrows(ResponseException.class,
|
expectThrows(ResponseException.class,
|
||||||
() -> client().performRequest("get", "/_xpack/ml/datafeeds/" + datafeedId));
|
() -> client().performRequest(new Request("GET", "/_xpack/ml/datafeeds/" + datafeedId)));
|
||||||
}
|
}
|
||||||
|
|
||||||
private class LookbackOnlyTestHelper {
|
private class LookbackOnlyTestHelper {
|
||||||
|
@ -727,9 +800,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||||
startDatafeedAndWaitUntilStopped(datafeedId);
|
startDatafeedAndWaitUntilStopped(datafeedId);
|
||||||
waitUntilJobIsClosed(jobId);
|
waitUntilJobIsClosed(jobId);
|
||||||
|
|
||||||
Response jobStatsResponse = client().performRequest("get",
|
Response jobStatsResponse = client().performRequest(new Request("GET",
|
||||||
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
|
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
||||||
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
|
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
|
||||||
if (shouldSucceedInput) {
|
if (shouldSucceedInput) {
|
||||||
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
|
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
|
||||||
} else {
|
} else {
|
||||||
|
@ -748,16 +821,20 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private void startDatafeedAndWaitUntilStopped(String datafeedId, String authHeader) throws Exception {
|
private void startDatafeedAndWaitUntilStopped(String datafeedId, String authHeader) throws Exception {
|
||||||
Response startDatafeedRequest = client().performRequest("post",
|
Request request = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
|
||||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z",
|
request.addParameter("start", "2016-06-01T00:00:00Z");
|
||||||
new BasicHeader("Authorization", authHeader));
|
request.addParameter("end", "2016-06-02T00:00:00Z");
|
||||||
assertThat(startDatafeedRequest.getStatusLine().getStatusCode(), equalTo(200));
|
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||||
assertThat(responseEntityToString(startDatafeedRequest), equalTo("{\"started\":true}"));
|
options.addHeader("Authorization", authHeader);
|
||||||
|
request.setOptions(options);
|
||||||
|
Response startDatafeedResponse = client().performRequest(request);
|
||||||
|
assertThat(EntityUtils.toString(startDatafeedResponse.getEntity()), equalTo("{\"started\":true}"));
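startDatafeedAndWaitUntilStopped is also where the commit moves the Authorization header from a BasicHeader vararg onto RequestOptions. The same shape, sketched in isolation; the endpoint, dates, and header value are taken from the test but the wrapper itself is hypothetical:

    import java.io.IOException;
    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.RequestOptions;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    final class AuthHeaderSketch {
        static Response startDatafeedAs(RestClient client, String datafeedId, String authHeader) throws IOException {
            Request request = new Request("POST", "/_xpack/ml/datafeeds/" + datafeedId + "/_start");
            request.addParameter("start", "2016-06-01T00:00:00Z");
            request.addParameter("end", "2016-06-02T00:00:00Z");
            RequestOptions.Builder options = request.getOptions().toBuilder();
            options.addHeader("Authorization", authHeader);   // replaces new BasicHeader("Authorization", authHeader)
            request.setOptions(options);
            return client.performRequest(request);
        }
    }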
|
||||||
assertBusy(() -> {
|
assertBusy(() -> {
|
||||||
try {
|
try {
|
||||||
Response datafeedStatsResponse = client().performRequest("get",
|
Response datafeedStatsResponse = client().performRequest(new Request("GET",
|
||||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stats");
|
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stats"));
|
||||||
assertThat(responseEntityToString(datafeedStatsResponse), containsString("\"state\":\"stopped\""));
|
assertThat(EntityUtils.toString(datafeedStatsResponse.getEntity()),
|
||||||
|
containsString("\"state\":\"stopped\""));
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
}
|
}
|
||||||
|
@ -767,9 +844,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||||
private void waitUntilJobIsClosed(String jobId) throws Exception {
|
private void waitUntilJobIsClosed(String jobId) throws Exception {
|
||||||
assertBusy(() -> {
|
assertBusy(() -> {
|
||||||
try {
|
try {
|
||||||
Response jobStatsResponse = client().performRequest("get",
|
Response jobStatsResponse = client().performRequest(new Request("GET",
|
||||||
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
|
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
||||||
assertThat(responseEntityToString(jobStatsResponse), containsString("\"state\":\"closed\""));
|
assertThat(EntityUtils.toString(jobStatsResponse.getEntity()), containsString("\"state\":\"closed\""));
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
throw new RuntimeException(e);
|
throw new RuntimeException(e);
|
||||||
}
|
}
|
||||||
|
@ -777,27 +854,30 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private Response createJob(String id, String airlineVariant) throws Exception {
|
private Response createJob(String id, String airlineVariant) throws Exception {
|
||||||
String job = "{\n" + " \"description\":\"Analysis of response time by airline\",\n"
|
Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + id);
|
||||||
+ " \"analysis_config\" : {\n" + " \"bucket_span\":\"1h\",\n"
|
request.setJsonEntity("{\n"
|
||||||
|
+ " \"description\": \"Analysis of response time by airline\",\n"
|
||||||
|
+ " \"analysis_config\": {\n"
|
||||||
|
+ " \"bucket_span\": \"1h\",\n"
|
||||||
+ " \"detectors\" :[\n"
|
+ " \"detectors\" :[\n"
|
||||||
+ " {\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"" + airlineVariant + "\"}]\n"
|
+ " {\n"
|
||||||
+ " },\n" + " \"data_description\" : {\n"
|
+ " \"function\": \"mean\",\n"
|
||||||
|
+ " \"field_name\": \"responsetime\",\n"
|
||||||
|
+ " \"by_field_name\": \"" + airlineVariant + "\"\n"
|
||||||
|
+ " }\n"
|
||||||
|
+ " ]\n"
|
||||||
|
+ " },\n"
|
||||||
|
+ " \"data_description\": {\n"
|
||||||
+ " \"format\": \"xcontent\",\n"
|
+ " \"format\": \"xcontent\",\n"
|
||||||
+ " \"time_field\":\"time stamp\",\n" + " \"time_format\":\"yyyy-MM-dd'T'HH:mm:ssX\"\n" + " }\n"
|
+ " \"time_field\": \"time stamp\",\n"
|
||||||
+ "}";
|
+ " \"time_format\": \"yyyy-MM-dd'T'HH:mm:ssX\"\n"
|
||||||
return client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + id,
|
+ " }\n"
|
||||||
Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
|
+ "}");
|
||||||
}
|
return client().performRequest(request);
|
||||||
|
|
||||||
private static String responseEntityToString(Response response) throws Exception {
|
|
||||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
|
|
||||||
return reader.lines().collect(Collectors.joining("\n"));
|
|
||||||
}
|
|
||||||
}
|
}
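The responseEntityToString helper deleted above is replaced throughout by EntityUtils.toString from Apache HttpCore, which the new imports bring in. The two read the same body; a sketch of the contrast, with illustrative method names:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.nio.charset.StandardCharsets;
    import java.util.stream.Collectors;
    import org.apache.http.util.EntityUtils;
    import org.elasticsearch.client.Response;

    final class BodyToStringSketch {
        // Old approach: stream the entity line by line.
        static String viaBufferedReader(Response response) throws IOException {
            try (BufferedReader reader = new BufferedReader(
                    new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
                return reader.lines().collect(Collectors.joining("\n"));
            }
        }

        // New approach: one call that also honours the entity's declared charset.
        static String viaEntityUtils(Response response) throws IOException {
            return EntityUtils.toString(response.getEntity());
        }
    }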
|
||||||
|
|
||||||
public static void openJob(RestClient client, String jobId) throws IOException {
|
public static void openJob(RestClient client, String jobId) throws IOException {
|
||||||
Response response = client.performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open");
|
client.performRequest(new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open"));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@After
|
@After
|
||||||
|
@ -850,17 +930,28 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
Response build() throws IOException {
|
Response build() throws IOException {
|
||||||
String datafeedConfig = "{"
|
Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
|
||||||
|
request.setJsonEntity("{"
|
||||||
+ "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"],\"types\":[\"" + type + "\"]"
|
+ "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"],\"types\":[\"" + type + "\"]"
|
||||||
+ (source ? ",\"_source\":true" : "")
|
+ (source ? ",\"_source\":true" : "")
|
||||||
+ (scriptedFields == null ? "" : ",\"script_fields\":" + scriptedFields)
|
+ (scriptedFields == null ? "" : ",\"script_fields\":" + scriptedFields)
|
||||||
+ (aggregations == null ? "" : ",\"aggs\":" + aggregations)
|
+ (aggregations == null ? "" : ",\"aggs\":" + aggregations)
|
||||||
+ (chunkingTimespan == null ? "" :
|
+ (chunkingTimespan == null ? "" :
|
||||||
",\"chunking_config\":{\"mode\":\"MANUAL\",\"time_span\":\"" + chunkingTimespan + "\"}")
|
",\"chunking_config\":{\"mode\":\"MANUAL\",\"time_span\":\"" + chunkingTimespan + "\"}")
|
||||||
+ "}";
|
+ "}");
|
||||||
return client().performRequest("put", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId, Collections.emptyMap(),
|
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||||
new StringEntity(datafeedConfig, ContentType.APPLICATION_JSON),
|
options.addHeader("Authorization", authHeader);
|
||||||
new BasicHeader("Authorization", authHeader));
|
request.setOptions(options);
|
||||||
|
return client().performRequest(request);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
private void bulkIndex(String bulk) throws IOException {
|
||||||
|
Request bulkRequest = new Request("POST", "/_bulk");
|
||||||
|
bulkRequest.setJsonEntity(bulk);
|
||||||
|
bulkRequest.addParameter("refresh", "true");
|
||||||
|
bulkRequest.addParameter("pretty", null);
|
||||||
|
String bulkResponse = EntityUtils.toString(client().performRequest(bulkRequest).getEntity());
|
||||||
|
assertThat(bulkResponse, not(containsString("\"errors\": false")));
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -5,8 +5,7 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.xpack.ml.integration;
|
package org.elasticsearch.xpack.ml.integration;
|
||||||
|
|
||||||
import org.apache.http.entity.ContentType;
|
import org.apache.http.util.EntityUtils;
|
||||||
import org.apache.http.entity.StringEntity;
|
|
||||||
import org.elasticsearch.client.Request;
|
import org.elasticsearch.client.Request;
|
||||||
import org.elasticsearch.client.Response;
|
import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.client.ResponseException;
|
import org.elasticsearch.client.ResponseException;
|
||||||
|
@ -23,15 +22,10 @@ import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFiel
|
||||||
import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;
|
import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;
|
||||||
import org.junit.After;
|
import org.junit.After;
|
||||||
|
|
||||||
import java.io.BufferedReader;
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.InputStreamReader;
|
|
||||||
import java.nio.charset.StandardCharsets;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Locale;
|
import java.util.Locale;
|
||||||
import java.util.concurrent.atomic.AtomicInteger;
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
import java.util.concurrent.atomic.AtomicReference;
|
import java.util.concurrent.atomic.AtomicReference;
|
||||||
import java.util.stream.Collectors;
|
|
||||||
|
|
||||||
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||||
import static org.hamcrest.Matchers.containsString;
|
import static org.hamcrest.Matchers.containsString;
|
||||||
|
@ -55,15 +49,13 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
|
|
||||||
public void testPutJob_GivenFarequoteConfig() throws Exception {
|
public void testPutJob_GivenFarequoteConfig() throws Exception {
|
||||||
Response response = createFarequoteJob("given-farequote-config-job");
|
Response response = createFarequoteJob("given-farequote-config-job");
|
||||||
|
String responseAsString = EntityUtils.toString(response.getEntity());
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
String responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"given-farequote-config-job\""));
|
assertThat(responseAsString, containsString("\"job_id\":\"given-farequote-config-job\""));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetJob_GivenNoSuchJob() throws Exception {
|
public void testGetJob_GivenNoSuchJob() throws Exception {
|
||||||
ResponseException e = expectThrows(ResponseException.class,
|
ResponseException e = expectThrows(ResponseException.class, () ->
|
||||||
() -> client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/non-existing-job/_stats"));
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/non-existing-job/_stats")));
|
||||||
|
|
||||||
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404));
|
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404));
|
||||||
assertThat(e.getMessage(), containsString("No known job with id 'non-existing-job'"));
|
assertThat(e.getMessage(), containsString("No known job with id 'non-existing-job'"));
|
||||||
|
@ -72,11 +64,9 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
public void testGetJob_GivenJobExists() throws Exception {
|
public void testGetJob_GivenJobExists() throws Exception {
|
||||||
createFarequoteJob("get-job_given-job-exists-job");
|
createFarequoteJob("get-job_given-job-exists-job");
|
||||||
|
|
||||||
Response response = client().performRequest("get",
|
Response response = client().performRequest(new Request("GET",
|
||||||
MachineLearning.BASE_PATH + "anomaly_detectors/get-job_given-job-exists-job/_stats");
|
MachineLearning.BASE_PATH + "anomaly_detectors/get-job_given-job-exists-job/_stats"));
|
||||||
|
String responseAsString = EntityUtils.toString(response.getEntity());
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
String responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"count\":1"));
|
assertThat(responseAsString, containsString("\"count\":1"));
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"get-job_given-job-exists-job\""));
|
assertThat(responseAsString, containsString("\"job_id\":\"get-job_given-job-exists-job\""));
|
||||||
}
|
}
|
||||||
|
@ -86,20 +76,16 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
createFarequoteJob(jobId);
|
createFarequoteJob(jobId);
|
||||||
|
|
||||||
// Explicit _all
|
// Explicit _all
|
||||||
Response response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/_all");
|
String explicitAll = EntityUtils.toString(
|
||||||
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/_all")).getEntity());
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
assertThat(explictAll, containsString("\"count\":1"));
|
||||||
String responseAsString = responseEntityToString(response);
|
assertThat(explictAll, containsString("\"job_id\":\"" + jobId + "\""));
|
||||||
assertThat(responseAsString, containsString("\"count\":1"));
|
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"" + jobId + "\""));
|
|
||||||
|
|
||||||
// Implicit _all
|
// Implicit _all
|
||||||
response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors");
|
String implicitAll = EntityUtils.toString(
|
||||||
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors")).getEntity());
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
assertThat(implicitAll, containsString("\"count\":1"));
|
||||||
responseAsString = responseEntityToString(response);
|
assertThat(implicitAll, containsString("\"job_id\":\"" + jobId + "\""));
|
||||||
assertThat(responseAsString, containsString("\"count\":1"));
|
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"" + jobId + "\""));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetJobs_GivenMultipleJobs() throws Exception {
|
public void testGetJobs_GivenMultipleJobs() throws Exception {
|
||||||
|
@ -108,36 +94,37 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
createFarequoteJob("given-multiple-jobs-job-3");
|
createFarequoteJob("given-multiple-jobs-job-3");
|
||||||
|
|
||||||
// Explicit _all
|
// Explicit _all
|
||||||
Response response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/_all");
|
String explicitAll = EntityUtils.toString(
|
||||||
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/_all")).getEntity());
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
assertThat(explicitAll, containsString("\"count\":3"));
|
||||||
String responseAsString = responseEntityToString(response);
|
assertThat(explicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
||||||
assertThat(responseAsString, containsString("\"count\":3"));
|
assertThat(explicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
assertThat(explicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
|
||||||
|
|
||||||
// Implicit _all
|
// Implicit _all
|
||||||
response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors");
|
String implicitAll = EntityUtils.toString(
|
||||||
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors")).getEntity());
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
assertThat(implicitAll, containsString("\"count\":3"));
|
||||||
responseAsString = responseEntityToString(response);
|
assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
||||||
assertThat(responseAsString, containsString("\"count\":3"));
|
assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
|
||||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private Response createFarequoteJob(String jobId) throws IOException {
|
private Response createFarequoteJob(String jobId) throws IOException {
|
||||||
String job = "{\n" + " \"description\":\"Analysis of response time by airline\",\n"
|
Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||||
+ " \"analysis_config\" : {\n" + " \"bucket_span\": \"3600s\",\n"
|
request.setJsonEntity(
|
||||||
|
"{\n"
|
||||||
|
+ " \"description\":\"Analysis of response time by airline\",\n"
|
||||||
|
+ " \"analysis_config\" : {\n"
|
||||||
|
+ " \"bucket_span\": \"3600s\",\n"
|
||||||
+ " \"detectors\" :[{\"function\":\"metric\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]\n"
|
+ " \"detectors\" :[{\"function\":\"metric\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]\n"
|
||||||
+ " },\n" + " \"data_description\" : {\n" + " \"field_delimiter\":\",\",\n" + " " +
|
+ " },\n" + " \"data_description\" : {\n"
|
||||||
"\"time_field\":\"time\",\n"
|
+ " \"field_delimiter\":\",\",\n"
|
||||||
+ " \"time_format\":\"yyyy-MM-dd HH:mm:ssX\"\n" + " }\n" + "}";
|
+ " \"time_field\":\"time\",\n"
|
||||||
|
+ " \"time_format\":\"yyyy-MM-dd HH:mm:ssX\"\n"
|
||||||
return client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
|
+ " }\n"
|
||||||
Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
|
+ "}");
|
||||||
|
return client().performRequest(request);
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testCantCreateJobWithSameID() throws Exception {
|
public void testCantCreateJobWithSameID() throws Exception {
|
||||||
|
@ -148,18 +135,14 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
" \"data_description\": {},\n" +
|
" \"data_description\": {},\n" +
|
||||||
" \"results_index_name\" : \"%s\"}";
|
" \"results_index_name\" : \"%s\"}";
|
||||||
|
|
||||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, "index-1");
|
|
||||||
|
|
||||||
String jobId = "cant-create-job-with-same-id-job";
|
String jobId = "cant-create-job-with-same-id-job";
|
||||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId ,
|
Request createJob1 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||||
Collections.emptyMap(),
|
createJob1.setJsonEntity(String.format(Locale.ROOT, jobTemplate, "index-1"));
|
||||||
new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
client().performRequest(createJob1);
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
|
|
||||||
final String jobConfig2 = String.format(Locale.ROOT, jobTemplate, "index-2");
|
Request createJob2 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||||
ResponseException e = expectThrows(ResponseException.class,
|
createJob2.setJsonEntity(String.format(Locale.ROOT, jobTemplate, "index-2"));
|
||||||
() ->client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
|
ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(createJob2));
|
||||||
Collections.emptyMap(), new StringEntity(jobConfig2, ContentType.APPLICATION_JSON)));
|
|
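The duplicate-id test, like the 409 checks in the datafeed tests earlier, leans on the same error-path idiom: let expectThrows capture the ResponseException and inspect the Response it carries. A compact reminder of the shape, as a fragment that assumes the surrounding ESRestTestCase; failingRequest stands in for whichever PUT or DELETE is expected to fail:

    // Fragment; runs inside an ESRestTestCase test method.
    ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(failingRequest));
    Response errorResponse = e.getResponse();
    assertEquals(400, errorResponse.getStatusLine().getStatusCode());   // 409 for the datafeed-conflict cases
    assertThat(e.getMessage(), containsString("The Id is already used"));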
||||||
|
|
||||||
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
|
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
|
||||||
assertThat(e.getMessage(), containsString("The job cannot be created with the Id '" + jobId + "'. The Id is already used."));
|
assertThat(e.getMessage(), containsString("The job cannot be created with the Id '" + jobId + "'. The Id is already used."));
|
||||||
|
@ -175,94 +158,78 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
|
|
||||||
String jobId1 = "create-jobs-with-index-name-option-job-1";
|
String jobId1 = "create-jobs-with-index-name-option-job-1";
|
||||||
String indexName = "non-default-index";
|
String indexName = "non-default-index";
|
||||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, indexName);
|
Request createJob1 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||||
|
createJob1.setJsonEntity(String.format(Locale.ROOT, jobTemplate, indexName));
|
||||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
client().performRequest(createJob1);
|
||||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
|
|
||||||
String jobId2 = "create-jobs-with-index-name-option-job-2";
|
String jobId2 = "create-jobs-with-index-name-option-job-2";
|
||||||
response = client().performRequest("put", MachineLearning.BASE_PATH
|
Request createJob2 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||||
+ "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
createJob2.setEntity(createJob1.getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
client().performRequest(createJob2);
|
||||||
|
|
||||||
// With security enabled GET _aliases throws an index_not_found_exception
|
// With security enabled GET _aliases throws an index_not_found_exception
|
||||||
// if no aliases have been created. In multi-node tests the alias may not
|
// if no aliases have been created. In multi-node tests the alias may not
|
||||||
// appear immediately so wait here.
|
// appear immediately so wait here.
|
||||||
assertBusy(() -> {
|
assertBusy(() -> {
|
||||||
try {
|
try {
|
||||||
Response aliasesResponse = client().performRequest("get", "_aliases");
|
String aliasesResponse = EntityUtils.toString(client().performRequest(new Request("GET", "/_aliases")).getEntity());
|
||||||
assertEquals(200, aliasesResponse.getStatusLine().getStatusCode());
|
assertThat(aliasesResponse,
|
||||||
String responseAsString = responseEntityToString(aliasesResponse);
|
|
||||||
assertThat(responseAsString,
|
|
||||||
containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{"));
|
containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{"));
|
||||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)
|
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)
|
||||||
+ "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}"));
|
+ "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}"));
|
||||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}"));
|
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}"));
|
||||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)
|
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)
|
||||||
+ "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}"));
|
+ "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}"));
|
||||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}"));
|
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}"));
|
||||||
} catch (ResponseException e) {
|
} catch (ResponseException e) {
|
||||||
throw new AssertionError(e);
|
throw new AssertionError(e);
|
||||||
}
|
}
|
||||||
});
|
});
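The alias checks sit inside assertBusy because, as the comment notes, the aliases may not appear immediately in multi-node runs. assertBusy comes from ESTestCase and simply re-runs the block until it stops throwing AssertionError or a timeout elapses; a sketch of the shape used throughout this file, with a hypothetical alias name:

    // Fragment; assertBusy and client() come from the enclosing ESRestTestCase.
    assertBusy(() -> {
        try {
            String aliases = EntityUtils.toString(
                    client().performRequest(new Request("GET", "/_aliases")).getEntity());
            assertThat(aliases, containsString("my-expected-alias"));   // hypothetical alias name
        } catch (ResponseException e) {
            throw new AssertionError(e);   // surface as AssertionError so assertBusy keeps retrying
        }
    });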
|
||||||
|
|
||||||
Response indicesResponse = client().performRequest("get", "_cat/indices");
|
String responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, indicesResponse.getStatusLine().getStatusCode());
|
|
||||||
String responseAsString = responseEntityToString(indicesResponse);
|
|
||||||
assertThat(responseAsString,
|
assertThat(responseAsString,
|
||||||
containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));
|
containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));
|
||||||
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1))));
|
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1))));
|
||||||
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))));
|
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))));
|
||||||
|
|
||||||
String bucketResult = String.format(Locale.ROOT,
|
|
||||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
|
||||||
jobId1, "1234", 1);
|
|
||||||
String id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1234", 300);
|
String id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1234", 300);
|
||||||
response = client().performRequest("put", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id,
|
Request createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id);
|
||||||
Collections.emptyMap(), new StringEntity(bucketResult, ContentType.APPLICATION_JSON));
|
createResultRequest.setJsonEntity(String.format(Locale.ROOT,
|
||||||
assertEquals(201, response.getStatusLine().getStatusCode());
|
|
||||||
|
|
||||||
bucketResult = String.format(Locale.ROOT,
|
|
||||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
||||||
jobId1, "1236", 1);
|
jobId1, "1234", 1));
|
||||||
|
client().performRequest(createResultRequest);
|
||||||
|
|
||||||
id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1236", 300);
|
id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1236", 300);
|
||||||
response = client().performRequest("put", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id,
|
createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id);
|
||||||
Collections.emptyMap(), new StringEntity(bucketResult, ContentType.APPLICATION_JSON));
|
createResultRequest.setJsonEntity(String.format(Locale.ROOT,
|
||||||
assertEquals(201, response.getStatusLine().getStatusCode());
|
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
||||||
|
jobId1, "1236", 1));
|
||||||
|
client().performRequest(createResultRequest);
|
||||||
|
|
||||||
client().performRequest("post", "_refresh");
|
client().performRequest(new Request("POST", "/_refresh"));
|
||||||
|
|
||||||
response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1 + "/results/buckets");
|
responseAsString = EntityUtils.toString(client().performRequest(
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1 + "/results/buckets")).getEntity());
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"count\":2"));
|
assertThat(responseAsString, containsString("\"count\":2"));
|
||||||
|
|
||||||
response = client().performRequest("get", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/_search");
|
responseAsString = EntityUtils.toString(client().performRequest(
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
new Request("GET", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/_search")).getEntity());
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"total\":2"));
|
assertThat(responseAsString, containsString("\"total\":2"));
|
||||||
|
|
||||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
|
|
||||||
// check that indices still exist, but are empty and aliases are gone
|
// check that indices still exist, but are empty and aliases are gone
|
||||||
response = client().performRequest("get", "_aliases");
|
responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_aliases")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1))));
|
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1))));
|
||||||
assertThat(responseAsString, containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))); //job2 still exists
|
assertThat(responseAsString, containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))); //job2 still exists
|
||||||
|
|
||||||
response = client().performRequest("get", "_cat/indices");
|
responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));
|
assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));
|
||||||
|
|
||||||
client().performRequest("post", "_refresh");
|
client().performRequest(new Request("POST", "/_refresh"));
|
||||||
|
|
||||||
response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count");
|
responseAsString = EntityUtils.toString(client().performRequest(
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
new Request("GET", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count")).getEntity());
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"count\":0"));
|
assertThat(responseAsString, containsString("\"count\":0"));
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -278,32 +245,27 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
String byFieldName1 = "responsetime";
|
String byFieldName1 = "responsetime";
|
||||||
String jobId2 = "create-job-in-shared-index-updates-mapping-job-2";
|
String jobId2 = "create-job-in-shared-index-updates-mapping-job-2";
|
||||||
String byFieldName2 = "cpu-usage";
|
String byFieldName2 = "cpu-usage";
|
||||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName1);
|
|
||||||
|
|
||||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
Request createJob1Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
createJob1Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName1));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
client().performRequest(createJob1Request);
|
||||||
|
|
||||||
// Check the index mapping contains the first by_field_name
|
// Check the index mapping contains the first by_field_name
|
||||||
response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX
|
Request getResultsMappingRequest = new Request("GET",
|
||||||
+ AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping?pretty");
|
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping");
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
getResultsMappingRequest.addParameter("pretty", null);
|
||||||
String responseAsString = responseEntityToString(response);
|
String resultsMappingAfterJob1 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||||
assertThat(responseAsString, containsString(byFieldName1));
|
assertThat(resultsMappingAfterJob1, containsString(byFieldName1));
|
||||||
assertThat(responseAsString, not(containsString(byFieldName2)));
|
assertThat(resultsMappingAfterJob1, not(containsString(byFieldName2)));
|
||||||
|
|
||||||
jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName2);
|
Request createJob2Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||||
response = client().performRequest("put", MachineLearning.BASE_PATH
|
createJob2Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName2));
|
||||||
+ "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
client().performRequest(createJob2Request);
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
|
|
||||||
// Check the index mapping now contains both fields
|
// Check the index mapping now contains both fields
|
||||||
response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX
|
String resultsMappingAfterJob2 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||||
+ AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping?pretty");
|
assertThat(resultsMappingAfterJob2, containsString(byFieldName1));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(resultsMappingAfterJob2, containsString(byFieldName2));
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString(byFieldName1));
|
|
||||||
assertThat(responseAsString, containsString(byFieldName2));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testCreateJobInCustomSharedIndexUpdatesMapping() throws Exception {
|
public void testCreateJobInCustomSharedIndexUpdatesMapping() throws Exception {
|
||||||
|
@ -318,32 +280,27 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
String byFieldName1 = "responsetime";
|
String byFieldName1 = "responsetime";
|
||||||
String jobId2 = "create-job-in-custom-shared-index-updates-mapping-job-2";
|
String jobId2 = "create-job-in-custom-shared-index-updates-mapping-job-2";
|
||||||
String byFieldName2 = "cpu-usage";
|
String byFieldName2 = "cpu-usage";
|
||||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName1);
|
|
||||||
|
|
||||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
Request createJob1Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
createJob1Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName1));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
client().performRequest(createJob1Request);
|
||||||
|
|
||||||
// Check the index mapping contains the first by_field_name
|
// Check the index mapping contains the first by_field_name
|
||||||
response = client().performRequest("get",
|
Request getResultsMappingRequest = new Request("GET",
|
||||||
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty");
|
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index/_mapping");
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
getResultsMappingRequest.addParameter("pretty", null);
|
||||||
String responseAsString = responseEntityToString(response);
|
String resultsMappingAfterJob1 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||||
assertThat(responseAsString, containsString(byFieldName1));
|
assertThat(resultsMappingAfterJob1, containsString(byFieldName1));
|
||||||
assertThat(responseAsString, not(containsString(byFieldName2)));
|
assertThat(resultsMappingAfterJob1, not(containsString(byFieldName2)));
|
||||||
|
|
||||||
jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName2);
|
Request createJob2Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||||
response = client().performRequest("put", MachineLearning.BASE_PATH
|
createJob2Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName2));
|
||||||
+ "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
client().performRequest(createJob2Request);
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
|
|
||||||
// Check the index mapping now contains both fields
|
// Check the index mapping now contains both fields
|
||||||
response = client().performRequest("get",
|
String resultsMappingAfterJob2 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||||
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty");
|
assertThat(resultsMappingAfterJob2, containsString(byFieldName1));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(resultsMappingAfterJob2, containsString(byFieldName2));
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString(byFieldName1));
|
|
||||||
assertThat(responseAsString, containsString(byFieldName2));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testCreateJob_WithClashingFieldMappingsFails() throws Exception {
|
public void testCreateJob_WithClashingFieldMappingsFails() throws Exception {
|
||||||
|
@ -366,17 +323,14 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
byFieldName1 = "response.time";
|
byFieldName1 = "response.time";
|
||||||
byFieldName2 = "response";
|
byFieldName2 = "response";
|
||||||
}
|
}
|
||||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName1);
|
|
||||||
|
|
||||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
Request createJob1Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
createJob1Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName1));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
client().performRequest(createJob1Request);
|
||||||
|
|
||||||
final String failingJobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName2);
|
|
||||||
ResponseException e = expectThrows(ResponseException.class,
|
|
||||||
() -> client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2,
|
|
||||||
Collections.emptyMap(), new StringEntity(failingJobConfig, ContentType.APPLICATION_JSON)));
|
|
||||||
|
|
||||||
|
Request createJob2Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||||
|
createJob2Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName2));
|
||||||
|
ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(createJob2Request));
|
||||||
assertThat(e.getMessage(),
|
assertThat(e.getMessage(),
|
||||||
containsString("This job would cause a mapping clash with existing field [response] - " +
|
containsString("This job would cause a mapping clash with existing field [response] - " +
|
||||||
"avoid the clash by assigning a dedicated results index"));
|
"avoid the clash by assigning a dedicated results index"));
|
||||||
|
@ -387,35 +341,27 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
||||||
createFarequoteJob(jobId);
|
createFarequoteJob(jobId);
|
||||||
|
|
||||||
Response response = client().performRequest("get", "_cat/indices");
|
String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(indicesBeforeDelete, containsString(indexName));
|
||||||
String responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString(indexName));
|
|
||||||
|
|
||||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
|
|
||||||
// check that the index still exists (it's shared by default)
|
// check that the index still exists (it's shared by default)
|
||||||
response = client().performRequest("get", "_cat/indices");
|
String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(indicesAfterDelete, containsString(indexName));
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString(indexName));
|
|
||||||
|
|
||||||
assertBusy(() -> {
|
assertBusy(() -> {
|
||||||
try {
|
try {
|
||||||
Response r = client().performRequest("get", indexName + "/_count");
|
String count = EntityUtils.toString(client().performRequest(new Request("GET", indexName + "/_count")).getEntity());
|
||||||
assertEquals(200, r.getStatusLine().getStatusCode());
|
assertThat(count, containsString("\"count\":0"));
|
||||||
String responseString = responseEntityToString(r);
|
|
||||||
assertThat(responseString, containsString("\"count\":0"));
|
|
||||||
} catch (Exception e) {
|
} catch (Exception e) {
|
||||||
fail(e.getMessage());
|
fail(e.getMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// check that the job itself is gone
|
// check that the job itself is gone
|
||||||
expectThrows(ResponseException.class, () ->
|
expectThrows(ResponseException.class, () ->
|
||||||
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testDeleteJobAfterMissingIndex() throws Exception {
|
public void testDeleteJobAfterMissingIndex() throws Exception {
|
||||||
|
@ -424,28 +370,22 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
||||||
createFarequoteJob(jobId);
|
createFarequoteJob(jobId);
|
||||||
|
|
||||||
Response response = client().performRequest("get", "_cat/indices");
|
String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(indicesBeforeDelete, containsString(indexName));
|
||||||
String responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString(indexName));
|
|
||||||
|
|
||||||
// Manually delete the index so that we can test that deletion proceeds
|
// Manually delete the index so that we can test that deletion proceeds
|
||||||
// normally anyway
|
// normally anyway
|
||||||
response = client().performRequest("delete", indexName);
|
client().performRequest(new Request("DELETE", indexName));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
|
|
||||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
|
|
||||||
// check index was deleted
|
// check index was deleted
|
||||||
response = client().performRequest("get", "_cat/indices");
|
String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(indicesAfterDelete, not(containsString(aliasName)));
|
||||||
responseAsString = responseEntityToString(response);
|
assertThat(indicesAfterDelete, not(containsString(indexName)));
|
||||||
assertThat(responseAsString, not(containsString(aliasName)));
|
|
||||||
assertThat(responseAsString, not(containsString(indexName)));
|
|
||||||
|
|
||||||
expectThrows(ResponseException.class, () ->
|
expectThrows(ResponseException.class, () ->
|
||||||
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testDeleteJobAfterMissingAliases() throws Exception {
|
public void testDeleteJobAfterMissingAliases() throws Exception {
|
||||||
|
@ -460,11 +400,9 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
// appear immediately so wait here.
|
// appear immediately so wait here.
|
||||||
assertBusy(() -> {
|
assertBusy(() -> {
|
||||||
try {
|
try {
|
||||||
Response aliasesResponse = client().performRequest(new Request("get", "_cat/aliases"));
|
String aliases = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/aliases")).getEntity());
|
||||||
assertEquals(200, aliasesResponse.getStatusLine().getStatusCode());
|
assertThat(aliases, containsString(readAliasName));
|
||||||
String responseAsString = responseEntityToString(aliasesResponse);
|
assertThat(aliases, containsString(writeAliasName));
|
||||||
assertThat(responseAsString, containsString(readAliasName));
|
|
||||||
assertThat(responseAsString, containsString(writeAliasName));
|
|
||||||
} catch (ResponseException e) {
|
} catch (ResponseException e) {
|
||||||
throw new AssertionError(e);
|
throw new AssertionError(e);
|
||||||
}
|
}
|
||||||
|
@ -472,17 +410,14 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
|
|
||||||
// Manually delete the aliases so that we can test that deletion proceeds
|
// Manually delete the aliases so that we can test that deletion proceeds
|
||||||
// normally anyway
|
// normally anyway
|
||||||
Response response = client().performRequest("delete", indexName + "/_alias/" + readAliasName);
|
client().performRequest(new Request("DELETE", indexName + "/_alias/" + readAliasName));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
client().performRequest(new Request("DELETE", indexName + "/_alias/" + writeAliasName));
|
||||||
response = client().performRequest("delete", indexName + "/_alias/" + writeAliasName);
|
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
|
|
||||||
// check aliases were deleted
|
// check aliases were deleted
|
||||||
expectThrows(ResponseException.class, () -> client().performRequest("get", indexName + "/_alias/" + readAliasName));
|
expectThrows(ResponseException.class, () -> client().performRequest(new Request("GET", indexName + "/_alias/" + readAliasName)));
|
||||||
expectThrows(ResponseException.class, () -> client().performRequest("get", indexName + "/_alias/" + writeAliasName));
|
expectThrows(ResponseException.class, () -> client().performRequest(new Request("GET", indexName + "/_alias/" + writeAliasName)));
|
||||||
|
|
||||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testMultiIndexDelete() throws Exception {
|
public void testMultiIndexDelete() throws Exception {
|
||||||
|
@ -490,86 +425,63 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
||||||
createFarequoteJob(jobId);
|
createFarequoteJob(jobId);
|
||||||
|
|
||||||
Response response = client().performRequest("put", indexName + "-001");
|
client().performRequest(new Request("PUT", indexName + "-001"));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
client().performRequest(new Request("PUT", indexName + "-002"));
|
||||||
|
|
||||||
response = client().performRequest("put", indexName + "-002");
|
String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(indicesBeforeDelete, containsString(indexName));
|
||||||
|
assertThat(indicesBeforeDelete, containsString(indexName + "-001"));
|
||||||
response = client().performRequest("get", "_cat/indices");
|
assertThat(indicesBeforeDelete, containsString(indexName + "-002"));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
String responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString(indexName));
|
|
||||||
assertThat(responseAsString, containsString(indexName + "-001"));
|
|
||||||
assertThat(responseAsString, containsString(indexName + "-002"));
|
|
||||||
|
|
||||||
// Add some documents to each index to make sure the DBQ clears them out
|
// Add some documents to each index to make sure the DBQ clears them out
|
||||||
String recordResult =
|
Request createDoc0 = new Request("PUT", indexName + "/doc/" + 123);
|
||||||
String.format(Locale.ROOT,
|
createDoc0.setJsonEntity(String.format(Locale.ROOT,
|
||||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"bucket_span\":%d, \"result_type\":\"record\"}",
|
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"bucket_span\":%d, \"result_type\":\"record\"}",
|
||||||
jobId, 123, 1);
|
jobId, 123, 1));
|
||||||
client().performRequest("put", indexName + "/doc/" + 123,
|
client().performRequest(createDoc0);
|
||||||
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
|
Request createDoc1 = new Request("PUT", indexName + "-001/doc/" + 123);
|
||||||
client().performRequest("put", indexName + "-001/doc/" + 123,
|
createDoc1.setEntity(createDoc0.getEntity());
|
||||||
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
|
client().performRequest(createDoc1);
|
||||||
client().performRequest("put", indexName + "-002/doc/" + 123,
|
Request createDoc2 = new Request("PUT", indexName + "-002/doc/" + 123);
|
||||||
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
|
createDoc2.setEntity(createDoc0.getEntity());
|
||||||
|
client().performRequest(createDoc2);
|
||||||
|
|
||||||
// Also index a few through the alias for the first job
|
// Also index a few through the alias for the first job
|
||||||
client().performRequest("put", indexName + "/doc/" + 456,
|
Request createDoc3 = new Request("PUT", indexName + "/doc/" + 456);
|
||||||
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
|
createDoc3.setEntity(createDoc0.getEntity());
|
||||||
|
client().performRequest(createDoc3);
|
||||||
|
|
||||||
|
client().performRequest(new Request("POST", "/_refresh"));
|
||||||
client().performRequest("post", "_refresh");
|
|
||||||
|
|
||||||
// check for the documents
|
// check for the documents
|
||||||
response = client().performRequest("get", indexName+ "/_count");
|
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "/_count")).getEntity()),
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
containsString("\"count\":2"));
|
||||||
responseAsString = responseEntityToString(response);
|
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-001/_count")).getEntity()),
|
||||||
assertThat(responseAsString, containsString("\"count\":2"));
|
containsString("\"count\":1"));
|
||||||
|
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-002/_count")).getEntity()),
|
||||||
response = client().performRequest("get", indexName + "-001/_count");
|
containsString("\"count\":1"));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"count\":1"));
|
|
||||||
|
|
||||||
response = client().performRequest("get", indexName + "-002/_count");
|
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"count\":1"));
|
|
||||||
|
|
||||||
// Delete
|
// Delete
|
||||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
|
||||||
|
|
||||||
client().performRequest("post", "_refresh");
|
client().performRequest(new Request("POST", "/_refresh"));
|
||||||
|
|
||||||
// check that the indices still exist but are empty
|
// check that the indices still exist but are empty
|
||||||
response = client().performRequest("get", "_cat/indices");
|
String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
assertThat(indicesAfterDelete, containsString(indexName));
|
||||||
responseAsString = responseEntityToString(response);
|
assertThat(indicesAfterDelete, containsString(indexName + "-001"));
|
||||||
assertThat(responseAsString, containsString(indexName));
|
assertThat(indicesAfterDelete, containsString(indexName + "-002"));
|
||||||
assertThat(responseAsString, containsString(indexName + "-001"));
|
|
||||||
assertThat(responseAsString, containsString(indexName + "-002"));
|
|
||||||
|
|
||||||
response = client().performRequest("get", indexName + "/_count");
|
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "/_count")).getEntity()),
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
containsString("\"count\":0"));
|
||||||
responseAsString = responseEntityToString(response);
|
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-001/_count")).getEntity()),
|
||||||
assertThat(responseAsString, containsString("\"count\":0"));
|
containsString("\"count\":0"));
|
||||||
|
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-002/_count")).getEntity()),
|
||||||
response = client().performRequest("get", indexName + "-001/_count");
|
containsString("\"count\":0"));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"count\":0"));
|
|
||||||
|
|
||||||
response = client().performRequest("get", indexName + "-002/_count");
|
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
|
||||||
responseAsString = responseEntityToString(response);
|
|
||||||
assertThat(responseAsString, containsString("\"count\":0"));
|
|
||||||
|
|
||||||
|
|
||||||
expectThrows(ResponseException.class, () ->
|
expectThrows(ResponseException.class, () ->
|
||||||
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testDelete_multipleRequest() throws Exception {
|
public void testDelete_multipleRequest() throws Exception {
|
||||||
|
@ -590,7 +502,7 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
if (forceDelete) {
|
if (forceDelete) {
|
||||||
url += "?force=true";
|
url += "?force=true";
|
||||||
}
|
}
|
||||||
Response response = client().performRequest("delete", url);
|
Response response = client().performRequest(new Request("DELETE", url));
|
||||||
responses.put(Thread.currentThread().getId(), response);
|
responses.put(Thread.currentThread().getId(), response);
|
||||||
} catch (ResponseException re) {
|
} catch (ResponseException re) {
|
||||||
responseExceptions.put(Thread.currentThread().getId(), re);
|
responseExceptions.put(Thread.currentThread().getId(), re);
|
||||||
|
@ -640,11 +552,12 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
for (Response response : responses.values()) {
|
for (Response response : responses.values()) {
|
||||||
assertEquals(responseEntityToString(response), 200, response.getStatusLine().getStatusCode());
|
assertEquals(EntityUtils.toString(response.getEntity()), 200, response.getStatusLine().getStatusCode());
|
||||||
}
|
}
|
||||||
|
|
||||||
assertNotNull(recreationResponse.get());
|
assertNotNull(recreationResponse.get());
|
||||||
assertEquals(responseEntityToString(recreationResponse.get()), 200, recreationResponse.get().getStatusLine().getStatusCode());
|
assertEquals(EntityUtils.toString(recreationResponse.get().getEntity()),
|
||||||
|
200, recreationResponse.get().getStatusLine().getStatusCode());
|
||||||
|
|
||||||
if (recreationException.get() != null) {
|
if (recreationException.get() != null) {
|
||||||
assertNull(recreationException.get().getMessage(), recreationException.get());
|
assertNull(recreationException.get().getMessage(), recreationException.get());
|
||||||
|
@ -656,7 +569,7 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
// but in the case that it does not, the job that is recreated may get deleted.
|
// but in the case that it does not, the job that is recreated may get deleted.
|
||||||
// It is not an error if the job does not exist but the following assertions
|
// It is not an error if the job does not exist but the following assertions
|
||||||
// will fail in that case.
|
// will fail in that case.
|
||||||
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||||
|
|
||||||
// Check that the job aliases exist. These are the last thing to be deleted when a job is deleted, so
|
// Check that the job aliases exist. These are the last thing to be deleted when a job is deleted, so
|
||||||
// if there's been a race between deletion and recreation these are what will be missing.
|
// if there's been a race between deletion and recreation these are what will be missing.
|
||||||
|
@ -682,15 +595,8 @@ public class MlJobIT extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
private String getAliases() throws IOException {
|
private String getAliases() throws IOException {
|
||||||
Response response = client().performRequest("get", "_aliases");
|
Response response = client().performRequest(new Request("GET", "/_aliases"));
|
||||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
return EntityUtils.toString(response.getEntity());
|
||||||
return responseEntityToString(response);
|
|
||||||
}
|
|
||||||
|
|
||||||
private static String responseEntityToString(Response response) throws IOException {
|
|
||||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
|
|
||||||
return reader.lines().collect(Collectors.joining("\n"));
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@After
|
@After
|
||||||
|
|
|
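
The MlJobIT changes above all follow the same migration: the deprecated multi-argument performRequest overloads are replaced by the low-level REST client's Request object, which carries the HTTP method, endpoint, query parameters and body explicitly, and response bodies are read with EntityUtils.toString instead of the removed responseEntityToString helper. A minimal before/after sketch of that pattern, assuming the org.elasticsearch.client.Request/Response and org.apache.http.util.EntityUtils imports these tests already have; the index name and document body are purely illustrative:

// Old style: method, endpoint, parameters and entity passed as separate arguments
// client().performRequest("put", "some-index/doc/1",
//         Collections.singletonMap("refresh", "true"),
//         new StringEntity("{\"field\":\"value\"}", ContentType.APPLICATION_JSON));

// New style: everything hangs off a single Request object
Request request = new Request("PUT", "/some-index/doc/1");
request.addParameter("refresh", "true");          // query parameters are added one by one
request.setJsonEntity("{\"field\":\"value\"}");   // sets the body and the JSON content type
Response response = client().performRequest(request);
String body = EntityUtils.toString(response.getEntity());   // replaces responseEntityToString(response)
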
@ -5,9 +5,8 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.xpack.ml.transforms;
|
package org.elasticsearch.xpack.ml.transforms;
|
||||||
|
|
||||||
import org.apache.http.entity.ContentType;
|
|
||||||
import org.apache.http.entity.StringEntity;
|
|
||||||
import org.apache.http.util.EntityUtils;
|
import org.apache.http.util.EntityUtils;
|
||||||
|
import org.elasticsearch.client.Request;
|
||||||
import org.elasticsearch.client.Response;
|
import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
import org.elasticsearch.cluster.metadata.IndexMetaData;
|
||||||
import org.elasticsearch.common.Strings;
|
import org.elasticsearch.common.Strings;
|
||||||
|
@ -18,7 +17,6 @@ import org.elasticsearch.xpack.ml.utils.DomainSplitFunction;
|
||||||
import org.joda.time.DateTime;
|
import org.joda.time.DateTime;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
|
@ -185,9 +183,10 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
|
||||||
.put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0);
|
.put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0);
|
||||||
|
|
||||||
createIndex("painless", settings.build());
|
createIndex("painless", settings.build());
|
||||||
client().performRequest("PUT", "painless/test/1", Collections.emptyMap(),
|
Request createDoc = new Request("PUT", "/painless/test/1");
|
||||||
new StringEntity("{\"test\": \"test\"}", ContentType.APPLICATION_JSON));
|
createDoc.setJsonEntity("{\"test\": \"test\"}");
|
||||||
client().performRequest("POST", "painless/_refresh");
|
createDoc.addParameter("refresh", "true");
|
||||||
|
client().performRequest(createDoc);
|
||||||
|
|
||||||
Pattern pattern = Pattern.compile("domain_split\":\\[(.*?),(.*?)\\]");
|
Pattern pattern = Pattern.compile("domain_split\":\\[(.*?),(.*?)\\]");
|
||||||
|
|
||||||
|
@ -198,7 +197,9 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
|
||||||
String mapAsJson = Strings.toString(jsonBuilder().map(params));
|
String mapAsJson = Strings.toString(jsonBuilder().map(params));
|
||||||
logger.info("params={}", mapAsJson);
|
logger.info("params={}", mapAsJson);
|
||||||
|
|
||||||
StringEntity body = new StringEntity("{\n" +
|
Request searchRequest = new Request("GET", "/painless/test/_search");
|
||||||
|
searchRequest.setJsonEntity(
|
||||||
|
"{\n" +
|
||||||
" \"query\" : {\n" +
|
" \"query\" : {\n" +
|
||||||
" \"match_all\": {}\n" +
|
" \"match_all\": {}\n" +
|
||||||
" },\n" +
|
" },\n" +
|
||||||
|
@ -212,10 +213,8 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
"}", ContentType.APPLICATION_JSON);
|
"}");
|
||||||
|
String responseBody = EntityUtils.toString(client().performRequest(searchRequest).getEntity());
|
||||||
Response response = client().performRequest("GET", "painless/test/_search", Collections.emptyMap(), body);
|
|
||||||
String responseBody = EntityUtils.toString(response.getEntity());
|
|
||||||
Matcher m = pattern.matcher(responseBody);
|
Matcher m = pattern.matcher(responseBody);
|
||||||
|
|
||||||
String actualSubDomain = "";
|
String actualSubDomain = "";
|
||||||
|
@ -242,9 +241,10 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
|
||||||
|
|
||||||
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32966")
|
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32966")
|
||||||
public void testHRDSplit() throws Exception {
|
public void testHRDSplit() throws Exception {
|
||||||
|
|
||||||
// Create job
|
// Create job
|
||||||
String job = "{\n" +
|
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job");
|
||||||
|
createJobRequest.setJsonEntity(
|
||||||
|
"{\n" +
|
||||||
" \"description\":\"Domain splitting\",\n" +
|
" \"description\":\"Domain splitting\",\n" +
|
||||||
" \"analysis_config\" : {\n" +
|
" \"analysis_config\" : {\n" +
|
||||||
" \"bucket_span\":\"3600s\",\n" +
|
" \"bucket_span\":\"3600s\",\n" +
|
||||||
|
@ -255,11 +255,9 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
|
||||||
" \"time_field\":\"time\"\n" +
|
" \"time_field\":\"time\"\n" +
|
||||||
" \n" +
|
" \n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }";
|
"}");
|
||||||
|
client().performRequest(createJobRequest);
|
||||||
client().performRequest("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job", Collections.emptyMap(),
|
client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/_open"));
|
||||||
new StringEntity(job, ContentType.APPLICATION_JSON));
|
|
||||||
client().performRequest("POST", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/_open");
|
|
||||||
|
|
||||||
// Create index to hold data
|
// Create index to hold data
|
||||||
Settings.Builder settings = Settings.builder()
|
Settings.Builder settings = Settings.builder()
|
||||||
|
@ -284,24 +282,24 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
|
||||||
if (i == 64) {
|
if (i == 64) {
|
||||||
// Anomaly has 100 docs, but we don't care about the value
|
// Anomaly has 100 docs, but we don't care about the value
|
||||||
for (int j = 0; j < 100; j++) {
|
for (int j = 0; j < 100; j++) {
|
||||||
client().performRequest("PUT", "painless/test/" + time.toDateTimeISO() + "_" + j,
|
Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO() + "_" + j);
|
||||||
Collections.emptyMap(),
|
createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO() + "\"}");
|
||||||
new StringEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO()
|
client().performRequest(createDocRequest);
|
||||||
+ "\"}", ContentType.APPLICATION_JSON));
|
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Non-anomalous values will be what's seen when the anomaly is reported
|
// Non-anomalous values will be what's seen when the anomaly is reported
|
||||||
client().performRequest("PUT", "painless/test/" + time.toDateTimeISO(),
|
Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO());
|
||||||
Collections.emptyMap(),
|
createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO() + "\"}");
|
||||||
new StringEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO()
|
client().performRequest(createDocRequest);
|
||||||
+ "\"}", ContentType.APPLICATION_JSON));
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
client().performRequest("POST", "painless/_refresh");
|
client().performRequest(new Request("POST", "/painless/_refresh"));
|
||||||
|
|
||||||
// Create and start datafeed
|
// Create and start datafeed
|
||||||
String body = "{\n" +
|
Request createFeedRequest = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed");
|
||||||
|
createFeedRequest.setJsonEntity(
|
||||||
|
"{\n" +
|
||||||
" \"job_id\":\"hrd-split-job\",\n" +
|
" \"job_id\":\"hrd-split-job\",\n" +
|
||||||
" \"indexes\":[\"painless\"],\n" +
|
" \"indexes\":[\"painless\"],\n" +
|
||||||
" \"types\":[\"test\"],\n" +
|
" \"types\":[\"test\"],\n" +
|
||||||
|
@ -310,18 +308,17 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
|
||||||
" \"script\": \"return domainSplit(doc['domain'].value, params);\"\n" +
|
" \"script\": \"return domainSplit(doc['domain'].value, params);\"\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }\n" +
|
" }\n" +
|
||||||
" }";
|
"}");
|
||||||
|
|
||||||
client().performRequest("PUT", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed", Collections.emptyMap(),
|
client().performRequest(createFeedRequest);
|
||||||
new StringEntity(body, ContentType.APPLICATION_JSON));
|
client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed/_start"));
|
||||||
client().performRequest("POST", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed/_start");
|
|
||||||
|
|
||||||
boolean passed = awaitBusy(() -> {
|
boolean passed = awaitBusy(() -> {
|
||||||
try {
|
try {
|
||||||
client().performRequest("POST", "/_refresh");
|
client().performRequest(new Request("POST", "/_refresh"));
|
||||||
|
|
||||||
Response response = client().performRequest("GET",
|
Response response = client().performRequest(new Request("GET",
|
||||||
MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/results/records");
|
MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/results/records"));
|
||||||
String responseBody = EntityUtils.toString(response.getEntity());
|
String responseBody = EntityUtils.toString(response.getEntity());
|
||||||
|
|
||||||
if (responseBody.contains("\"count\":2")) {
|
if (responseBody.contains("\"count\":2")) {
|
||||||
|
|
|
@ -120,8 +120,10 @@ public class MonitoringIT extends ESSingleNodeTestCase {
|
||||||
|
|
||||||
// REST is the realistic way that these operations happen, so it's the most realistic way to integration test it too
|
// REST is the realistic way that these operations happen, so it's the most realistic way to integration test it too
|
||||||
// Use Monitoring Bulk API to index 3 documents
|
// Use Monitoring Bulk API to index 3 documents
|
||||||
//final Response bulkResponse = getRestClient().performRequest("POST", "/_xpack/monitoring/_bulk",
|
//final Request bulkRequest = new Request("POST", "/_xpack/monitoring/_bulk");
|
||||||
// parameters, createBulkEntity());
|
//<<add all parameters>
|
||||||
|
//bulkRequest.setJsonEntity(createBulkEntity());
|
||||||
|
//final Response bulkResponse = getRestClient().performRequest(bulkRequest);
|
||||||
|
|
||||||
final MonitoringBulkResponse bulkResponse =
|
final MonitoringBulkResponse bulkResponse =
|
||||||
new MonitoringBulkRequestBuilder(client())
|
new MonitoringBulkRequestBuilder(client())
|
||||||
|
|
|
@ -15,19 +15,35 @@ import org.elasticsearch.action.search.SearchResponse;
|
||||||
import org.elasticsearch.common.unit.TimeValue;
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
import org.elasticsearch.index.query.QueryBuilder;
|
import org.elasticsearch.index.query.QueryBuilder;
|
||||||
import org.elasticsearch.index.query.RangeQueryBuilder;
|
import org.elasticsearch.index.query.RangeQueryBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||||
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation;
|
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregation;
|
||||||
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
|
import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
|
||||||
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
|
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
|
||||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
|
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.metrics.valuecount.ValueCountAggregationBuilder;
|
||||||
|
import org.elasticsearch.search.aggregations.support.ValueType;
|
||||||
|
import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder;
|
||||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||||
import org.elasticsearch.xpack.core.rollup.RollupField;
|
import org.elasticsearch.xpack.core.rollup.RollupField;
|
||||||
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
|
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
|
||||||
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
|
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
|
||||||
import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig;
|
import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig;
|
||||||
import org.elasticsearch.xpack.core.rollup.job.IndexerState;
|
import org.elasticsearch.xpack.core.rollup.job.IndexerState;
|
||||||
|
import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
|
||||||
import org.elasticsearch.xpack.core.rollup.job.RollupJob;
|
import org.elasticsearch.xpack.core.rollup.job.RollupJob;
|
||||||
import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
|
import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
|
||||||
import org.elasticsearch.xpack.core.rollup.job.RollupJobStats;
|
import org.elasticsearch.xpack.core.rollup.job.RollupJobStats;
|
||||||
|
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
|
||||||
|
import org.joda.time.DateTimeZone;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
|
@ -38,6 +54,10 @@ import java.util.concurrent.Executor;
|
||||||
import java.util.concurrent.atomic.AtomicBoolean;
|
import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
import java.util.concurrent.atomic.AtomicReference;
|
import java.util.concurrent.atomic.AtomicReference;
|
||||||
|
|
||||||
|
import static java.util.Collections.singletonList;
|
||||||
|
import static java.util.Collections.unmodifiableList;
|
||||||
|
import static org.elasticsearch.xpack.core.rollup.RollupField.formatFieldName;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* An abstract class that builds a rollup index incrementally. A background job can be launched using {@link #maybeTriggerAsyncJob(long)},
|
* An abstract class that builds a rollup index incrementally. A background job can be launched using {@link #maybeTriggerAsyncJob(long)},
|
||||||
* it will create the rollup index from the source index up to the last complete bucket that is allowed to be built (based on the current
|
* it will create the rollup index from the source index up to the last complete bucket that is allowed to be built (based on the current
|
||||||
|
@ -392,21 +412,12 @@ public abstract class RollupIndexer {
|
||||||
*/
|
*/
|
||||||
private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig config) {
|
private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig config) {
|
||||||
final GroupConfig groupConfig = config.getGroupConfig();
|
final GroupConfig groupConfig = config.getGroupConfig();
|
||||||
List<CompositeValuesSourceBuilder<?>> builders = new ArrayList<>();
|
List<CompositeValuesSourceBuilder<?>> builders = createValueSourceBuilders(groupConfig);
|
||||||
|
|
||||||
// Add all the agg builders to our request in order: date_histo -> histo -> terms
|
|
||||||
if (groupConfig != null) {
|
|
||||||
builders.addAll(groupConfig.getDateHistogram().toBuilders());
|
|
||||||
if (groupConfig.getHistogram() != null) {
|
|
||||||
builders.addAll(groupConfig.getHistogram().toBuilders());
|
|
||||||
}
|
|
||||||
if (groupConfig.getTerms() != null) {
|
|
||||||
builders.addAll(groupConfig.getTerms().toBuilders());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
CompositeAggregationBuilder composite = new CompositeAggregationBuilder(AGGREGATION_NAME, builders);
|
CompositeAggregationBuilder composite = new CompositeAggregationBuilder(AGGREGATION_NAME, builders);
|
||||||
config.getMetricsConfig().forEach(m -> m.toBuilders().forEach(composite::subAggregation));
|
|
||||||
|
List<AggregationBuilder> aggregations = createAggregationBuilders(config.getMetricsConfig());
|
||||||
|
aggregations.forEach(composite::subAggregation);
|
||||||
|
|
||||||
final Map<String, Object> metadata = createMetadata(groupConfig);
|
final Map<String, Object> metadata = createMetadata(groupConfig);
|
||||||
if (metadata.isEmpty() == false) {
|
if (metadata.isEmpty() == false) {
|
||||||
|
@ -456,5 +467,112 @@ public abstract class RollupIndexer {
|
||||||
}
|
}
|
||||||
return metadata;
|
return metadata;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final GroupConfig groupConfig) {
|
||||||
|
final List<CompositeValuesSourceBuilder<?>> builders = new ArrayList<>();
|
||||||
|
// Add all the agg builders to our request in order: date_histo -> histo -> terms
|
||||||
|
if (groupConfig != null) {
|
||||||
|
final DateHistogramGroupConfig dateHistogram = groupConfig.getDateHistogram();
|
||||||
|
builders.addAll(createValueSourceBuilders(dateHistogram));
|
||||||
|
|
||||||
|
final HistogramGroupConfig histogram = groupConfig.getHistogram();
|
||||||
|
builders.addAll(createValueSourceBuilders(histogram));
|
||||||
|
|
||||||
|
final TermsGroupConfig terms = groupConfig.getTerms();
|
||||||
|
builders.addAll(createValueSourceBuilders(terms));
|
||||||
|
}
|
||||||
|
return unmodifiableList(builders);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final DateHistogramGroupConfig dateHistogram) {
|
||||||
|
final String dateHistogramField = dateHistogram.getField();
|
||||||
|
final String dateHistogramName = RollupField.formatIndexerAggName(dateHistogramField, DateHistogramAggregationBuilder.NAME);
|
||||||
|
final DateHistogramValuesSourceBuilder dateHistogramBuilder = new DateHistogramValuesSourceBuilder(dateHistogramName);
|
||||||
|
dateHistogramBuilder.dateHistogramInterval(dateHistogram.getInterval());
|
||||||
|
dateHistogramBuilder.field(dateHistogramField);
|
||||||
|
dateHistogramBuilder.timeZone(toDateTimeZone(dateHistogram.getTimeZone()));
|
||||||
|
return singletonList(dateHistogramBuilder);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final HistogramGroupConfig histogram) {
|
||||||
|
final List<CompositeValuesSourceBuilder<?>> builders = new ArrayList<>();
|
||||||
|
if (histogram != null) {
|
||||||
|
for (String field : histogram.getFields()) {
|
||||||
|
final String histogramName = RollupField.formatIndexerAggName(field, HistogramAggregationBuilder.NAME);
|
||||||
|
final HistogramValuesSourceBuilder histogramBuilder = new HistogramValuesSourceBuilder(histogramName);
|
||||||
|
histogramBuilder.interval(histogram.getInterval());
|
||||||
|
histogramBuilder.field(field);
|
||||||
|
histogramBuilder.missingBucket(true);
|
||||||
|
builders.add(histogramBuilder);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return unmodifiableList(builders);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static List<CompositeValuesSourceBuilder<?>> createValueSourceBuilders(final TermsGroupConfig terms) {
|
||||||
|
final List<CompositeValuesSourceBuilder<?>> builders = new ArrayList<>();
|
||||||
|
if (terms != null) {
|
||||||
|
for (String field : terms.getFields()) {
|
||||||
|
final String termsName = RollupField.formatIndexerAggName(field, TermsAggregationBuilder.NAME);
|
||||||
|
final TermsValuesSourceBuilder termsBuilder = new TermsValuesSourceBuilder(termsName);
|
||||||
|
termsBuilder.field(field);
|
||||||
|
termsBuilder.missingBucket(true);
|
||||||
|
builders.add(termsBuilder);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return unmodifiableList(builders);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This returns a set of aggregation builders which represent the configured
|
||||||
|
* set of metrics. Used to iterate over historical data.
|
||||||
|
*/
|
||||||
|
static List<AggregationBuilder> createAggregationBuilders(final List<MetricConfig> metricsConfigs) {
|
||||||
|
final List<AggregationBuilder> builders = new ArrayList<>();
|
||||||
|
if (metricsConfigs != null) {
|
||||||
|
for (MetricConfig metricConfig : metricsConfigs) {
|
||||||
|
final List<String> metrics = metricConfig.getMetrics();
|
||||||
|
if (metrics.isEmpty() == false) {
|
||||||
|
final String field = metricConfig.getField();
|
||||||
|
for (String metric : metrics) {
|
||||||
|
ValuesSourceAggregationBuilder.LeafOnly newBuilder;
|
||||||
|
if (metric.equals(MetricConfig.MIN.getPreferredName())) {
|
||||||
|
newBuilder = new MinAggregationBuilder(formatFieldName(field, MinAggregationBuilder.NAME, RollupField.VALUE));
|
||||||
|
} else if (metric.equals(MetricConfig.MAX.getPreferredName())) {
|
||||||
|
newBuilder = new MaxAggregationBuilder(formatFieldName(field, MaxAggregationBuilder.NAME, RollupField.VALUE));
|
||||||
|
} else if (metric.equals(MetricConfig.AVG.getPreferredName())) {
|
||||||
|
// Avgs are sum + count
|
||||||
|
newBuilder = new SumAggregationBuilder(formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.VALUE));
|
||||||
|
ValuesSourceAggregationBuilder.LeafOnly countBuilder
|
||||||
|
= new ValueCountAggregationBuilder(
|
||||||
|
formatFieldName(field, AvgAggregationBuilder.NAME, RollupField.COUNT_FIELD), ValueType.NUMERIC);
|
||||||
|
countBuilder.field(field);
|
||||||
|
builders.add(countBuilder);
|
||||||
|
} else if (metric.equals(MetricConfig.SUM.getPreferredName())) {
|
||||||
|
newBuilder = new SumAggregationBuilder(formatFieldName(field, SumAggregationBuilder.NAME, RollupField.VALUE));
|
||||||
|
} else if (metric.equals(MetricConfig.VALUE_COUNT.getPreferredName())) {
|
||||||
|
// TODO allow non-numeric value_counts.
|
||||||
|
// Hardcoding this is fine for now since the job validation guarantees that all metric fields are numerics
|
||||||
|
newBuilder = new ValueCountAggregationBuilder(
|
||||||
|
formatFieldName(field, ValueCountAggregationBuilder.NAME, RollupField.VALUE), ValueType.NUMERIC);
|
||||||
|
} else {
|
||||||
|
throw new IllegalArgumentException("Unsupported metric type [" + metric + "]");
|
||||||
|
}
|
||||||
|
newBuilder.field(field);
|
||||||
|
builders.add(newBuilder);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return unmodifiableList(builders);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static DateTimeZone toDateTimeZone(final String timezone) {
|
||||||
|
try {
|
||||||
|
return DateTimeZone.forOffsetHours(Integer.parseInt(timezone));
|
||||||
|
} catch (NumberFormatException e) {
|
||||||
|
return DateTimeZone.forID(timezone);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
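
The RollupIndexer hunk above replaces the removed GroupConfig/MetricConfig toBuilders() calls with static factory methods, so composite value sources and metric sub-aggregations can now be built outside createCompositeBuilder; the new RollupIndexTests below and the updated IndexerUtilsTests further down rely on exactly that. A rough sketch of how the pieces compose, assuming a RollupJobConfig named config and the imports already present in RollupIndexer; note that createAggregationBuilders is package-private, so callers must live in org.elasticsearch.xpack.rollup.job as the tests do:

GroupConfig groupConfig = config.getGroupConfig();

// Composite sources come back in the order the indexer expects: date_histo -> histo -> terms
List<CompositeValuesSourceBuilder<?>> sources = RollupIndexer.createValueSourceBuilders(groupConfig);
CompositeAggregationBuilder composite =
        new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, sources);

// Each MetricConfig expands into leaf aggregations (min/max/sum/value_count, avg as sum + count)
List<AggregationBuilder> metrics = RollupIndexer.createAggregationBuilders(config.getMetricsConfig());
metrics.forEach(composite::subAggregation);
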
@ -0,0 +1,83 @@
|
||||||
|
/*
|
||||||
|
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
|
||||||
|
* or more contributor license agreements. Licensed under the Elastic License;
|
||||||
|
* you may not use this file except in compliance with the Elastic License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.xpack.rollup.action.job;
|
||||||
|
|
||||||
|
import org.elasticsearch.action.ActionRequestValidationException;
|
||||||
|
import org.elasticsearch.action.fieldcaps.FieldCapabilities;
|
||||||
|
import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;
|
||||||
|
import org.elasticsearch.xpack.rollup.job.RollupIndexer;
|
||||||
|
|
||||||
|
import java.util.Collections;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
|
import static org.mockito.Mockito.mock;
|
||||||
|
import static org.mockito.Mockito.when;
|
||||||
|
|
||||||
|
public class RollupIndexTests extends ESTestCase {
|
||||||
|
|
||||||
|
public void testValidateMatchingField() {
|
||||||
|
ActionRequestValidationException e = new ActionRequestValidationException();
|
||||||
|
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
|
||||||
|
String type = getRandomType();
|
||||||
|
|
||||||
|
// Have to mock fieldcaps because the ctors aren't public...
|
||||||
|
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
|
||||||
|
when(fieldCaps.isAggregatable()).thenReturn(true);
|
||||||
|
responseMap.put("my_field", Collections.singletonMap(type, fieldCaps));
|
||||||
|
|
||||||
|
TermsGroupConfig config = new TermsGroupConfig("my_field");
|
||||||
|
config.validateMappings(responseMap, e);
|
||||||
|
if (e.validationErrors().size() != 0) {
|
||||||
|
fail(e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
List<CompositeValuesSourceBuilder<?>> builders = RollupIndexer.createValueSourceBuilders(config);
|
||||||
|
assertThat(builders.size(), equalTo(1));
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testValidateFieldMatchingNotAggregatable() {
|
||||||
|
ActionRequestValidationException e = new ActionRequestValidationException();
|
||||||
|
Map<String, Map<String, FieldCapabilities>> responseMap = new HashMap<>();
|
||||||
|
|
||||||
|
// Have to mock fieldcaps because the ctors aren't public...
|
||||||
|
FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
|
||||||
|
when(fieldCaps.isAggregatable()).thenReturn(false);
|
||||||
|
responseMap.put("my_field", Collections.singletonMap(getRandomType(), fieldCaps));
|
||||||
|
|
||||||
|
TermsGroupConfig config = new TermsGroupConfig("my_field");
|
||||||
|
config.validateMappings(responseMap, e);
|
||||||
|
assertThat(e.validationErrors().get(0), equalTo("The field [my_field] must be aggregatable across all indices, but is not."));
|
||||||
|
}
|
||||||
|
|
||||||
|
private String getRandomType() {
|
||||||
|
int n = randomIntBetween(0,8);
|
||||||
|
if (n == 0) {
|
||||||
|
return "keyword";
|
||||||
|
} else if (n == 1) {
|
||||||
|
return "text";
|
||||||
|
} else if (n == 2) {
|
||||||
|
return "long";
|
||||||
|
} else if (n == 3) {
|
||||||
|
return "integer";
|
||||||
|
} else if (n == 4) {
|
||||||
|
return "short";
|
||||||
|
} else if (n == 5) {
|
||||||
|
return "float";
|
||||||
|
} else if (n == 6) {
|
||||||
|
return "double";
|
||||||
|
} else if (n == 7) {
|
||||||
|
return "scaled_float";
|
||||||
|
} else if (n == 8) {
|
||||||
|
return "half_float";
|
||||||
|
}
|
||||||
|
return "long";
|
||||||
|
}
|
||||||
|
}
|
|
@ -21,6 +21,7 @@ import org.elasticsearch.index.mapper.DateFieldMapper;
|
||||||
import org.elasticsearch.index.mapper.MappedFieldType;
|
import org.elasticsearch.index.mapper.MappedFieldType;
|
||||||
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
import org.elasticsearch.index.mapper.NumberFieldMapper;
|
||||||
import org.elasticsearch.search.aggregations.Aggregation;
|
import org.elasticsearch.search.aggregations.Aggregation;
|
||||||
|
import org.elasticsearch.search.aggregations.AggregationBuilder;
|
||||||
import org.elasticsearch.search.aggregations.Aggregations;
|
import org.elasticsearch.search.aggregations.Aggregations;
|
||||||
import org.elasticsearch.search.aggregations.Aggregator;
|
import org.elasticsearch.search.aggregations.Aggregator;
|
||||||
import org.elasticsearch.search.aggregations.AggregatorTestCase;
|
import org.elasticsearch.search.aggregations.AggregatorTestCase;
|
||||||
|
@ -57,6 +58,7 @@ import static java.util.Collections.singletonList;
|
||||||
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig;
|
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomDateHistogramGroupConfig;
|
||||||
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomGroupConfig;
|
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomGroupConfig;
|
||||||
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig;
|
import static org.elasticsearch.xpack.core.rollup.ConfigTestHelpers.randomHistogramGroupConfig;
|
||||||
|
import static org.elasticsearch.xpack.rollup.job.RollupIndexer.createAggregationBuilders;
|
||||||
import static org.hamcrest.Matchers.equalTo;
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
import static org.mockito.Mockito.mock;
|
import static org.mockito.Mockito.mock;
|
||||||
import static org.mockito.Mockito.when;
|
import static org.mockito.Mockito.when;
|
||||||
|
@ -101,9 +103,11 @@ public class IndexerUtilsTests extends AggregatorTestCase {
|
||||||
//TODO swap this over to DateHistoConfig.Builder once DateInterval is in
|
//TODO swap this over to DateHistoConfig.Builder once DateInterval is in
|
||||||
DateHistogramGroupConfig dateHistoGroupConfig = new DateHistogramGroupConfig(timestampField, DateHistogramInterval.DAY);
|
DateHistogramGroupConfig dateHistoGroupConfig = new DateHistogramGroupConfig(timestampField, DateHistogramInterval.DAY);
|
||||||
CompositeAggregationBuilder compositeBuilder =
|
CompositeAggregationBuilder compositeBuilder =
|
||||||
new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, dateHistoGroupConfig.toBuilders());
|
new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME,
|
||||||
|
RollupIndexer.createValueSourceBuilders(dateHistoGroupConfig));
|
||||||
MetricConfig metricConfig = new MetricConfig("does_not_exist", singletonList("max"));
|
MetricConfig metricConfig = new MetricConfig("does_not_exist", singletonList("max"));
|
||||||
metricConfig.toBuilders().forEach(compositeBuilder::subAggregation);
|
List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
|
||||||
|
metricAgg.forEach(compositeBuilder::subAggregation);
|
||||||
|
|
||||||
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
|
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
|
||||||
aggregator.preCollection();
|
aggregator.preCollection();
|
||||||
|
@ -170,7 +174,8 @@ public class IndexerUtilsTests extends AggregatorTestCase {
|
||||||
singletonList(dateHisto));
|
singletonList(dateHisto));
|
||||||
|
|
||||||
MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max"));
|
MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max"));
|
||||||
metricConfig.toBuilders().forEach(compositeBuilder::subAggregation);
|
List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
|
||||||
|
metricAgg.forEach(compositeBuilder::subAggregation);
|
||||||
|
|
||||||
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
|
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
|
||||||
aggregator.preCollection();
|
aggregator.preCollection();
|
||||||
|
@ -226,7 +231,8 @@ public class IndexerUtilsTests extends AggregatorTestCase {
|
||||||
singletonList(terms));
|
singletonList(terms));
|
||||||
|
|
||||||
MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max"));
|
MetricConfig metricConfig = new MetricConfig(valueField, singletonList("max"));
|
||||||
metricConfig.toBuilders().forEach(compositeBuilder::subAggregation);
|
List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
|
||||||
|
metricAgg.forEach(compositeBuilder::subAggregation);
|
||||||
|
|
||||||
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType);
|
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType);
|
||||||
aggregator.preCollection();
|
aggregator.preCollection();
|
||||||
|
@ -292,7 +298,8 @@ public class IndexerUtilsTests extends AggregatorTestCase {
|
||||||
singletonList(dateHisto));
|
singletonList(dateHisto));
|
||||||
|
|
||||||
MetricConfig metricConfig = new MetricConfig("another_field", Arrays.asList("avg", "sum"));
|
MetricConfig metricConfig = new MetricConfig("another_field", Arrays.asList("avg", "sum"));
|
||||||
metricConfig.toBuilders().forEach(compositeBuilder::subAggregation);
|
List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
|
||||||
|
metricAgg.forEach(compositeBuilder::subAggregation);
|
||||||
|
|
||||||
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
|
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, timestampFieldType, valueFieldType);
|
||||||
aggregator.preCollection();
|
aggregator.preCollection();
|
||||||
|
@ -523,11 +530,13 @@ public class IndexerUtilsTests extends AggregatorTestCase {
|
||||||
|
|
||||||
// Setup the composite agg
|
// Setup the composite agg
|
||||||
TermsGroupConfig termsGroupConfig = new TermsGroupConfig(valueField);
|
TermsGroupConfig termsGroupConfig = new TermsGroupConfig(valueField);
|
||||||
CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME,
|
CompositeAggregationBuilder compositeBuilder =
|
||||||
termsGroupConfig.toBuilders()).size(numDocs*2);
|
new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, RollupIndexer.createValueSourceBuilders(termsGroupConfig))
|
||||||
|
.size(numDocs*2);
|
||||||
|
|
||||||
MetricConfig metricConfig = new MetricConfig(metricField, singletonList("max"));
|
MetricConfig metricConfig = new MetricConfig(metricField, singletonList("max"));
|
||||||
metricConfig.toBuilders().forEach(compositeBuilder::subAggregation);
|
List<AggregationBuilder> metricAgg = createAggregationBuilders(singletonList(metricConfig));
|
||||||
|
metricAgg.forEach(compositeBuilder::subAggregation);
|
||||||
|
|
||||||
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType, metricFieldType);
|
Aggregator aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType, metricFieldType);
|
||||||
aggregator.preCollection();
|
aggregator.preCollection();
|
||||||
|
|
|
@ -1,6 +1,7 @@
|
||||||
evaluationDependsOn(xpackModule('core'))
|
evaluationDependsOn(xpackModule('core'))
|
||||||
|
|
||||||
apply plugin: 'elasticsearch.esplugin'
|
apply plugin: 'elasticsearch.esplugin'
|
||||||
|
apply plugin: 'nebula.maven-scm'
|
||||||
esplugin {
|
esplugin {
|
||||||
name 'x-pack-security'
|
name 'x-pack-security'
|
||||||
description 'Elasticsearch Expanded Pack Plugin - Security'
|
description 'Elasticsearch Expanded Pack Plugin - Security'
|
||||||
|
|
|
@ -24,6 +24,7 @@ import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm;
|
||||||
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
||||||
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
|
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
|
||||||
import org.elasticsearch.xpack.core.security.support.Exceptions;
|
import org.elasticsearch.xpack.core.security.support.Exceptions;
|
||||||
|
import org.elasticsearch.xpack.core.security.user.APMSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
|
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
||||||
|
@ -149,6 +150,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
|
||||||
return new LogstashSystemUser(userInfo.enabled);
|
return new LogstashSystemUser(userInfo.enabled);
|
||||||
case BeatsSystemUser.NAME:
|
case BeatsSystemUser.NAME:
|
||||||
return new BeatsSystemUser(userInfo.enabled);
|
return new BeatsSystemUser(userInfo.enabled);
|
||||||
|
case APMSystemUser.NAME:
|
||||||
|
return new APMSystemUser(userInfo.enabled);
|
||||||
default:
|
default:
|
||||||
if (anonymousEnabled && anonymousUser.principal().equals(username)) {
|
if (anonymousEnabled && anonymousUser.principal().equals(username)) {
|
||||||
return anonymousUser;
|
return anonymousUser;
|
||||||
|
@ -177,6 +180,9 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
|
||||||
userInfo = reservedUserInfos.get(BeatsSystemUser.NAME);
|
userInfo = reservedUserInfos.get(BeatsSystemUser.NAME);
|
||||||
users.add(new BeatsSystemUser(userInfo == null || userInfo.enabled));
|
users.add(new BeatsSystemUser(userInfo == null || userInfo.enabled));
|
||||||
|
|
||||||
|
userInfo = reservedUserInfos.get(APMSystemUser.NAME);
|
||||||
|
users.add(new APMSystemUser(userInfo == null || userInfo.enabled));
|
||||||
|
|
||||||
if (anonymousEnabled) {
|
if (anonymousEnabled) {
|
||||||
users.add(anonymousUser);
|
users.add(anonymousUser);
|
||||||
}
|
}
|
||||||
|
@ -228,6 +234,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {
|
||||||
switch (username) {
|
switch (username) {
|
||||||
case BeatsSystemUser.NAME:
|
case BeatsSystemUser.NAME:
|
||||||
return BeatsSystemUser.DEFINED_SINCE;
|
return BeatsSystemUser.DEFINED_SINCE;
|
||||||
|
case APMSystemUser.NAME:
|
||||||
|
return APMSystemUser.DEFINED_SINCE;
|
||||||
default:
|
default:
|
||||||
return Version.V_6_0_0;
|
return Version.V_6_0_0;
|
||||||
}
|
}
|
||||||
|
|
|
@ -27,6 +27,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
|
||||||
import org.elasticsearch.env.Environment;
|
import org.elasticsearch.env.Environment;
|
||||||
import org.elasticsearch.xpack.core.XPackSettings;
|
import org.elasticsearch.xpack.core.XPackSettings;
|
||||||
import org.elasticsearch.xpack.core.security.support.Validation;
|
import org.elasticsearch.xpack.core.security.support.Validation;
|
||||||
|
import org.elasticsearch.xpack.core.security.user.APMSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
||||||
|
@ -63,7 +64,8 @@ import static java.util.Arrays.asList;
|
||||||
public class SetupPasswordTool extends LoggingAwareMultiCommand {
|
public class SetupPasswordTool extends LoggingAwareMultiCommand {
|
||||||
|
|
||||||
private static final char[] CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789").toCharArray();
|
private static final char[] CHARS = ("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789").toCharArray();
|
||||||
public static final List<String> USERS = asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
|
public static final List<String> USERS = asList(ElasticUser.NAME, APMSystemUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME,
|
||||||
|
BeatsSystemUser.NAME);
|
||||||
|
|
||||||
private final BiFunction<Environment, Settings, CommandLineHttpClient> clientFunction;
|
private final BiFunction<Environment, Settings, CommandLineHttpClient> clientFunction;
|
||||||
private final CheckedFunction<Environment, KeyStoreWrapper, Exception> keyStoreFunction;
|
private final CheckedFunction<Environment, KeyStoreWrapper, Exception> keyStoreFunction;
|
||||||
|
|
|
@ -5,11 +5,9 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.xpack.security.authc.support;
|
package org.elasticsearch.xpack.security.authc.support;
|
||||||
|
|
||||||
import org.apache.lucene.util.SetOnce;
|
|
||||||
import org.elasticsearch.action.ActionListener;
|
import org.elasticsearch.action.ActionListener;
|
||||||
import org.elasticsearch.common.cache.Cache;
|
import org.elasticsearch.common.cache.Cache;
|
||||||
import org.elasticsearch.common.cache.CacheBuilder;
|
import org.elasticsearch.common.cache.CacheBuilder;
|
||||||
import org.elasticsearch.common.collect.Tuple;
|
|
||||||
import org.elasticsearch.common.settings.SecureString;
|
import org.elasticsearch.common.settings.SecureString;
|
||||||
import org.elasticsearch.common.unit.TimeValue;
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
import org.elasticsearch.common.util.concurrent.ListenableFuture;
|
import org.elasticsearch.common.util.concurrent.ListenableFuture;
|
||||||
|
@ -30,7 +28,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
|
||||||
|
|
||||||
public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm implements CachingRealm {
|
public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm implements CachingRealm {
|
||||||
|
|
||||||
private final Cache<String, ListenableFuture<Tuple<AuthenticationResult, UserWithHash>>> cache;
|
private final Cache<String, ListenableFuture<UserWithHash>> cache;
|
||||||
private final ThreadPool threadPool;
|
private final ThreadPool threadPool;
|
||||||
final Hasher cacheHasher;
|
final Hasher cacheHasher;
|
||||||
|
|
||||||
|
@ -38,9 +36,9 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
|
||||||
super(type, config);
|
super(type, config);
|
||||||
cacheHasher = Hasher.resolve(CachingUsernamePasswordRealmSettings.CACHE_HASH_ALGO_SETTING.get(config.settings()));
|
cacheHasher = Hasher.resolve(CachingUsernamePasswordRealmSettings.CACHE_HASH_ALGO_SETTING.get(config.settings()));
|
||||||
this.threadPool = threadPool;
|
this.threadPool = threadPool;
|
||||||
TimeValue ttl = CachingUsernamePasswordRealmSettings.CACHE_TTL_SETTING.get(config.settings());
|
final TimeValue ttl = CachingUsernamePasswordRealmSettings.CACHE_TTL_SETTING.get(config.settings());
|
||||||
if (ttl.getNanos() > 0) {
|
if (ttl.getNanos() > 0) {
|
||||||
cache = CacheBuilder.<String, ListenableFuture<Tuple<AuthenticationResult, UserWithHash>>>builder()
|
cache = CacheBuilder.<String, ListenableFuture<UserWithHash>>builder()
|
||||||
.setExpireAfterWrite(ttl)
|
.setExpireAfterWrite(ttl)
|
||||||
.setMaximumWeight(CachingUsernamePasswordRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings()))
|
.setMaximumWeight(CachingUsernamePasswordRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings()))
|
||||||
.build();
|
.build();
|
||||||
|
@ -49,6 +47,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
public final void expire(String username) {
|
public final void expire(String username) {
|
||||||
if (cache != null) {
|
if (cache != null) {
|
||||||
logger.trace("invalidating cache for user [{}] in realm [{}]", username, name());
|
logger.trace("invalidating cache for user [{}] in realm [{}]", username, name());
|
||||||
|
@ -56,6 +55,7 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
public final void expireAll() {
|
public final void expireAll() {
|
||||||
if (cache != null) {
|
if (cache != null) {
|
||||||
logger.trace("invalidating cache for all users in realm [{}]", name());
|
logger.trace("invalidating cache for all users in realm [{}]", name());
|
||||||
|
@ -72,108 +72,84 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
|
||||||
*/
|
*/
|
||||||
@Override
|
@Override
|
||||||
public final void authenticate(AuthenticationToken authToken, ActionListener<AuthenticationResult> listener) {
|
public final void authenticate(AuthenticationToken authToken, ActionListener<AuthenticationResult> listener) {
|
||||||
UsernamePasswordToken token = (UsernamePasswordToken) authToken;
|
final UsernamePasswordToken token = (UsernamePasswordToken) authToken;
|
||||||
try {
|
try {
|
||||||
if (cache == null) {
|
if (cache == null) {
|
||||||
doAuthenticate(token, listener);
|
doAuthenticate(token, listener);
|
||||||
} else {
|
} else {
|
||||||
authenticateWithCache(token, listener);
|
authenticateWithCache(token, listener);
|
||||||
}
|
}
|
||||||
} catch (Exception e) {
|
} catch (final Exception e) {
|
||||||
// each realm should handle exceptions, if we get one here it should be considered fatal
|
// each realm should handle exceptions, if we get one here it should be considered fatal
|
||||||
listener.onFailure(e);
|
listener.onFailure(e);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* This validates the {@code token} while making sure there is only one inflight
|
||||||
|
* request to the authentication source. Only successful responses are cached
|
||||||
|
* and any subsequent requests, bearing the <b>same</b> password, will succeed
|
||||||
|
* without reaching to the authentication source. A different password in a
|
||||||
|
* subsequent request, however, will clear the cache and <b>try</b> to reach to
|
||||||
|
* the authentication source.
|
||||||
|
*
|
||||||
|
* @param token The authentication token
|
||||||
|
* @param listener to be called at completion
|
||||||
|
*/
|
||||||
private void authenticateWithCache(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
|
private void authenticateWithCache(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
|
||||||
try {
|
try {
|
||||||
final SetOnce<User> authenticatedUser = new SetOnce<>();
|
final AtomicBoolean authenticationInCache = new AtomicBoolean(true);
|
||||||
final AtomicBoolean createdAndStartedFuture = new AtomicBoolean(false);
|
final ListenableFuture<UserWithHash> listenableCacheEntry = cache.computeIfAbsent(token.principal(), k -> {
|
||||||
final ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future = cache.computeIfAbsent(token.principal(), k -> {
|
authenticationInCache.set(false);
|
||||||
final ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> created = new ListenableFuture<>();
|
return new ListenableFuture<>();
|
||||||
if (createdAndStartedFuture.compareAndSet(false, true) == false) {
|
|
||||||
throw new IllegalStateException("something else already started this. how?");
|
|
||||||
}
|
|
||||||
return created;
|
|
||||||
});
|
});
|
||||||
|
if (authenticationInCache.get()) {
|
||||||
if (createdAndStartedFuture.get()) {
|
// there is a cached or an inflight authenticate request
|
||||||
doAuthenticate(token, ActionListener.wrap(result -> {
|
listenableCacheEntry.addListener(ActionListener.wrap(authenticatedUserWithHash -> {
|
||||||
if (result.isAuthenticated()) {
|
if (authenticatedUserWithHash != null && authenticatedUserWithHash.verify(token.credentials())) {
|
||||||
final User user = result.getUser();
|
// cached credential hash matches the credential hash for this forestalled request
|
||||||
authenticatedUser.set(user);
|
final User user = authenticatedUserWithHash.user;
|
||||||
final UserWithHash userWithHash = new UserWithHash(user, token.credentials(), cacheHasher);
|
logger.debug("realm [{}] authenticated user [{}], with roles [{}], from cache", name(), token.principal(),
|
||||||
future.onResponse(new Tuple<>(result, userWithHash));
|
user.roles());
|
||||||
} else {
|
|
||||||
future.onResponse(new Tuple<>(result, null));
|
|
||||||
}
|
|
||||||
}, future::onFailure));
|
|
||||||
}
|
|
||||||
|
|
||||||
future.addListener(ActionListener.wrap(tuple -> {
|
|
||||||
if (tuple != null) {
|
|
||||||
final UserWithHash userWithHash = tuple.v2();
|
|
||||||
final boolean performedAuthentication = createdAndStartedFuture.get() && userWithHash != null &&
|
|
||||||
tuple.v2().user == authenticatedUser.get();
|
|
||||||
handleResult(future, createdAndStartedFuture.get(), performedAuthentication, token, tuple, listener);
|
|
||||||
} else {
|
|
||||||
handleFailure(future, createdAndStartedFuture.get(), token, new IllegalStateException("unknown error authenticating"),
|
|
||||||
listener);
|
|
||||||
}
|
|
||||||
}, e -> handleFailure(future, createdAndStartedFuture.get(), token, e, listener)),
|
|
||||||
threadPool.executor(ThreadPool.Names.GENERIC));
|
|
||||||
} catch (ExecutionException e) {
|
|
||||||
listener.onResponse(AuthenticationResult.unsuccessful("", e));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void handleResult(ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future, boolean createdAndStartedFuture,
|
|
||||||
boolean performedAuthentication, UsernamePasswordToken token,
|
|
||||||
Tuple<AuthenticationResult, UserWithHash> result, ActionListener<AuthenticationResult> listener) {
|
|
||||||
final AuthenticationResult authResult = result.v1();
|
|
||||||
if (authResult == null) {
|
|
||||||
// this was from a lookup; clear and redo
|
|
||||||
cache.invalidate(token.principal(), future);
|
|
||||||
authenticateWithCache(token, listener);
|
|
||||||
} else if (authResult.isAuthenticated()) {
|
|
||||||
if (performedAuthentication) {
|
|
||||||
listener.onResponse(authResult);
|
|
||||||
} else {
|
|
||||||
UserWithHash userWithHash = result.v2();
|
|
||||||
if (userWithHash.verify(token.credentials())) {
|
|
||||||
if (userWithHash.user.enabled()) {
|
|
||||||
User user = userWithHash.user;
|
|
||||||
logger.debug("realm [{}] authenticated user [{}], with roles [{}]",
|
|
||||||
name(), token.principal(), user.roles());
|
|
||||||
listener.onResponse(AuthenticationResult.success(user));
|
listener.onResponse(AuthenticationResult.success(user));
|
||||||
} else {
|
} else {
|
||||||
// re-auth to see if user has been enabled
|
// The inflight request has failed or its credential hash does not match the
|
||||||
cache.invalidate(token.principal(), future);
|
// hash of the credential for this forestalled request.
|
||||||
|
// clear cache and try to reach the authentication source again because password
|
||||||
|
// might have changed there and the local cached hash got stale
|
||||||
|
cache.invalidate(token.principal(), listenableCacheEntry);
|
||||||
authenticateWithCache(token, listener);
|
authenticateWithCache(token, listener);
|
||||||
}
|
}
|
||||||
} else {
|
}, e -> {
|
||||||
// could be a password change?
|
// the inflight request failed, so try again, but first (always) make sure cache
|
||||||
cache.invalidate(token.principal(), future);
|
// is cleared of the failed authentication
|
||||||
|
cache.invalidate(token.principal(), listenableCacheEntry);
|
||||||
authenticateWithCache(token, listener);
|
authenticateWithCache(token, listener);
|
||||||
}
|
}), threadPool.executor(ThreadPool.Names.GENERIC));
|
||||||
}
|
|
||||||
} else {
|
} else {
|
||||||
cache.invalidate(token.principal(), future);
|
// attempt authentication against the authentication source
|
||||||
if (createdAndStartedFuture) {
|
doAuthenticate(token, ActionListener.wrap(authResult -> {
|
||||||
|
if (authResult.isAuthenticated() && authResult.getUser().enabled()) {
|
||||||
|
// compute the credential hash of this successful authentication request
|
||||||
|
final UserWithHash userWithHash = new UserWithHash(authResult.getUser(), token.credentials(), cacheHasher);
|
||||||
|
// notify any forestalled request listeners; they will not reach to the
|
||||||
|
// authentication request and instead will use this hash for comparison
|
||||||
|
listenableCacheEntry.onResponse(userWithHash);
|
||||||
|
} else {
|
||||||
|
// notify any forestalled request listeners; they will retry the request
|
||||||
|
listenableCacheEntry.onResponse(null);
|
||||||
|
}
|
||||||
|
// notify the listener of the inflight authentication request; this request is not retried
|
||||||
listener.onResponse(authResult);
|
listener.onResponse(authResult);
|
||||||
} else {
|
}, e -> {
|
||||||
authenticateWithCache(token, listener);
|
// notify any staved off listeners; they will retry the request
|
||||||
}
|
listenableCacheEntry.onFailure(e);
|
||||||
}
|
// notify the listener of the inflight authentication request; this request is not retried
|
||||||
}
|
listener.onFailure(e);
|
||||||
|
}));
|
||||||
private void handleFailure(ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future, boolean createdAndStarted,
|
}
|
||||||
UsernamePasswordToken token, Exception e, ActionListener<AuthenticationResult> listener) {
|
} catch (final ExecutionException e) {
|
||||||
cache.invalidate(token.principal(), future);
|
|
||||||
if (createdAndStarted) {
|
|
||||||
listener.onFailure(e);
|
listener.onFailure(e);
|
||||||
} else {
|
|
||||||
authenticateWithCache(token, listener);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -193,39 +169,58 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public final void lookupUser(String username, ActionListener<User> listener) {
|
public final void lookupUser(String username, ActionListener<User> listener) {
|
||||||
if (cache != null) {
|
|
||||||
try {
|
try {
|
||||||
ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future = cache.computeIfAbsent(username, key -> {
|
if (cache == null) {
|
||||||
ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> created = new ListenableFuture<>();
|
doLookupUser(username, listener);
|
||||||
|
} else {
|
||||||
|
lookupWithCache(username, listener);
|
||||||
|
}
|
||||||
|
} catch (final Exception e) {
|
||||||
|
// each realm should handle exceptions, if we get one here it should be
|
||||||
|
// considered fatal
|
||||||
|
listener.onFailure(e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void lookupWithCache(String username, ActionListener<User> listener) {
|
||||||
|
try {
|
||||||
|
final AtomicBoolean lookupInCache = new AtomicBoolean(true);
|
||||||
|
final ListenableFuture<UserWithHash> listenableCacheEntry = cache.computeIfAbsent(username, key -> {
|
||||||
|
lookupInCache.set(false);
|
||||||
|
return new ListenableFuture<>();
|
||||||
|
});
|
||||||
|
if (false == lookupInCache.get()) {
|
||||||
|
// attempt lookup against the user directory
|
||||||
doLookupUser(username, ActionListener.wrap(user -> {
|
doLookupUser(username, ActionListener.wrap(user -> {
|
||||||
if (user != null) {
|
if (user != null) {
|
||||||
UserWithHash userWithHash = new UserWithHash(user, null, null);
|
// user found
|
||||||
created.onResponse(new Tuple<>(null, userWithHash));
|
final UserWithHash userWithHash = new UserWithHash(user, null, null);
|
||||||
|
// notify forestalled request listeners
|
||||||
|
listenableCacheEntry.onResponse(userWithHash);
|
||||||
} else {
|
} else {
|
||||||
created.onResponse(new Tuple<>(null, null));
|
// user not found, invalidate cache so that subsequent requests are forwarded to
|
||||||
|
// the user directory
|
||||||
|
cache.invalidate(username, listenableCacheEntry);
|
||||||
|
// notify forestalled request listeners
|
||||||
|
listenableCacheEntry.onResponse(null);
|
||||||
}
|
}
|
||||||
}, created::onFailure));
|
}, e -> {
|
||||||
return created;
|
// the next request should be forwarded, not halted by a failed lookup attempt
|
||||||
});
|
cache.invalidate(username, listenableCacheEntry);
|
||||||
|
// notify forestalled listeners
|
||||||
future.addListener(ActionListener.wrap(tuple -> {
|
listenableCacheEntry.onFailure(e);
|
||||||
if (tuple != null) {
|
}));
|
||||||
if (tuple.v2() == null) {
|
|
||||||
cache.invalidate(username, future);
|
|
||||||
listener.onResponse(null);
|
|
||||||
} else {
|
|
||||||
listener.onResponse(tuple.v2().user);
|
|
||||||
}
|
}
|
||||||
|
listenableCacheEntry.addListener(ActionListener.wrap(userWithHash -> {
|
||||||
|
if (userWithHash != null) {
|
||||||
|
listener.onResponse(userWithHash.user);
|
||||||
} else {
|
} else {
|
||||||
listener.onResponse(null);
|
listener.onResponse(null);
|
||||||
}
|
}
|
||||||
}, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC));
|
}, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC));
|
||||||
} catch (ExecutionException e) {
|
} catch (final ExecutionException e) {
|
||||||
listener.onFailure(e);
|
listener.onFailure(e);
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
doLookupUser(username, listener);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
protected abstract void doLookupUser(String username, ActionListener<User> listener);
|
protected abstract void doLookupUser(String username, ActionListener<User> listener);
|
||||||
|
|
|
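The hunk above replaces the old Tuple-based cache with a cache of ListenableFuture<UserWithHash> entries, so all concurrent authentication attempts for one principal collapse into a single inflight request to the authentication source, and later ("forestalled") callers only compare their credential hash against the cached one. The following standalone sketch is not the Elasticsearch classes: it is a minimal single-flight cache built from plain JDK types (SingleFlightCache and its loader are hypothetical names) showing the same compute-a-future-once idea, without the credential hashing and enabled-user checks the real realm performs.

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.Function;

// Minimal single-flight cache sketch: the first caller for a key performs the
// expensive lookup; concurrent callers for the same key share its future.
final class SingleFlightCache<K, V> {
    private final ConcurrentMap<K, CompletableFuture<V>> inflight = new ConcurrentHashMap<>();

    CompletableFuture<V> get(K key, Function<K, V> loader) {
        final boolean[] owner = {false};
        CompletableFuture<V> future = inflight.computeIfAbsent(key, k -> {
            owner[0] = true;                    // this caller must start the real work
            return new CompletableFuture<>();
        });
        if (owner[0]) {
            try {
                future.complete(loader.apply(key));   // notify all waiting callers
            } catch (Exception e) {
                inflight.remove(key, future);         // failed entries are not cached
                future.completeExceptionally(e);
            }
        }
        return future;
    }

    void invalidate(K key, CompletableFuture<V> expected) {
        inflight.remove(key, expected);         // only evict the exact stale entry
    }
}

As in the realm code above, a failure removes exactly the future that was installed, so the next caller retries against the source instead of waiting behind a dead cache entry.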
@ -418,7 +418,7 @@ class IndicesAndAliasesResolver {
 
         private RemoteClusterResolver(Settings settings, ClusterSettings clusterSettings) {
             super(settings);
-            clusters = new CopyOnWriteArraySet<>(buildRemoteClustersSeeds(settings).keySet());
+            clusters = new CopyOnWriteArraySet<>(buildRemoteClustersDynamicConfig(settings).keySet());
             listenForUpdates(clusterSettings);
         }
 
@ -428,7 +428,7 @@ class IndicesAndAliasesResolver {
         }
 
         @Override
-        protected void updateRemoteCluster(String clusterAlias, List<String> addresses) {
+        protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxyAddress) {
             if (addresses.isEmpty()) {
                 clusters.remove(clusterAlias);
             } else {
|
@ -12,6 +12,7 @@ import org.elasticsearch.common.settings.SecureString;
|
||||||
import org.elasticsearch.common.util.set.Sets;
|
import org.elasticsearch.common.util.set.Sets;
|
||||||
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
|
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
|
||||||
import org.elasticsearch.xpack.core.security.client.SecurityClient;
|
import org.elasticsearch.xpack.core.security.client.SecurityClient;
|
||||||
|
import org.elasticsearch.xpack.core.security.user.APMSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
||||||
|
@ -88,7 +89,7 @@ public abstract class NativeRealmIntegTestCase extends SecurityIntegTestCase {
|
||||||
RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
|
RequestOptions.Builder optionsBuilder = RequestOptions.DEFAULT.toBuilder();
|
||||||
optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, reservedPassword));
|
optionsBuilder.addHeader("Authorization", UsernamePasswordToken.basicAuthHeaderValue(ElasticUser.NAME, reservedPassword));
|
||||||
RequestOptions options = optionsBuilder.build();
|
RequestOptions options = optionsBuilder.build();
|
||||||
for (String username : Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) {
|
for (String username : Arrays.asList(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME)) {
|
||||||
Request request = new Request("PUT", "/_xpack/security/user/" + username + "/_password");
|
Request request = new Request("PUT", "/_xpack/security/user/" + username + "/_password");
|
||||||
request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}");
|
request.setJsonEntity("{\"password\": \"" + new String(reservedPassword.getChars()) + "\"}");
|
||||||
request.setOptions(options);
|
request.setOptions(options);
|
||||||
|
|
|
@ -26,6 +26,7 @@ import org.elasticsearch.test.ESTestCase;
|
||||||
import org.elasticsearch.threadpool.ThreadPool;
|
import org.elasticsearch.threadpool.ThreadPool;
|
||||||
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
|
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
|
||||||
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
||||||
|
import org.elasticsearch.xpack.core.security.user.APMSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
||||||
|
@ -81,7 +82,8 @@ public class NativeUsersStoreTests extends ESTestCase {
|
||||||
public void testPasswordUpsertWhenSetEnabledOnReservedUser() throws Exception {
|
public void testPasswordUpsertWhenSetEnabledOnReservedUser() throws Exception {
|
||||||
final NativeUsersStore nativeUsersStore = startNativeUsersStore();
|
final NativeUsersStore nativeUsersStore = startNativeUsersStore();
|
||||||
|
|
||||||
final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
|
final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME,
|
||||||
|
BeatsSystemUser.NAME, APMSystemUser.NAME);
|
||||||
|
|
||||||
final PlainActionFuture<Void> future = new PlainActionFuture<>();
|
final PlainActionFuture<Void> future = new PlainActionFuture<>();
|
||||||
nativeUsersStore.setEnabled(user, true, WriteRequest.RefreshPolicy.IMMEDIATE, future);
|
nativeUsersStore.setEnabled(user, true, WriteRequest.RefreshPolicy.IMMEDIATE, future);
|
||||||
|
@ -99,7 +101,8 @@ public class NativeUsersStoreTests extends ESTestCase {
|
||||||
public void testBlankPasswordInIndexImpliesDefaultPassword() throws Exception {
|
public void testBlankPasswordInIndexImpliesDefaultPassword() throws Exception {
|
||||||
final NativeUsersStore nativeUsersStore = startNativeUsersStore();
|
final NativeUsersStore nativeUsersStore = startNativeUsersStore();
|
||||||
|
|
||||||
final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
|
final String user = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME,
|
||||||
|
BeatsSystemUser.NAME, APMSystemUser.NAME);
|
||||||
final Map<String, Object> values = new HashMap<>();
|
final Map<String, Object> values = new HashMap<>();
|
||||||
values.put(ENABLED_FIELD, Boolean.TRUE);
|
values.put(ENABLED_FIELD, Boolean.TRUE);
|
||||||
values.put(PASSWORD_FIELD, BLANK_PASSWORD);
|
values.put(PASSWORD_FIELD, BLANK_PASSWORD);
|
||||||
|
|
|
@ -13,6 +13,7 @@ import org.elasticsearch.test.NativeRealmIntegTestCase;
|
||||||
import org.elasticsearch.xpack.core.security.action.user.ChangePasswordResponse;
|
import org.elasticsearch.xpack.core.security.action.user.ChangePasswordResponse;
|
||||||
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
||||||
import org.elasticsearch.xpack.core.security.client.SecurityClient;
|
import org.elasticsearch.xpack.core.security.client.SecurityClient;
|
||||||
|
import org.elasticsearch.xpack.core.security.user.APMSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
import org.elasticsearch.xpack.core.security.user.KibanaUser;
|
||||||
|
@ -20,6 +21,7 @@ import org.elasticsearch.xpack.core.security.user.LogstashSystemUser;
|
||||||
import org.junit.BeforeClass;
|
import org.junit.BeforeClass;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
import static java.util.Collections.singletonMap;
|
import static java.util.Collections.singletonMap;
|
||||||
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||||
|
@ -49,7 +51,9 @@ public class ReservedRealmIntegTests extends NativeRealmIntegTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testAuthenticate() {
|
public void testAuthenticate() {
|
||||||
for (String username : Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) {
|
final List<String> usernames = Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME,
|
||||||
|
BeatsSystemUser.NAME, APMSystemUser.NAME);
|
||||||
|
for (String username : usernames) {
|
||||||
ClusterHealthResponse response = client()
|
ClusterHealthResponse response = client()
|
||||||
.filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword())))
|
.filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword())))
|
||||||
.admin()
|
.admin()
|
||||||
|
@ -67,7 +71,9 @@ public class ReservedRealmIntegTests extends NativeRealmIntegTestCase {
|
||||||
*/
|
*/
|
||||||
public void testAuthenticateAfterEnablingUser() {
|
public void testAuthenticateAfterEnablingUser() {
|
||||||
final SecurityClient c = securityClient();
|
final SecurityClient c = securityClient();
|
||||||
for (String username : Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME)) {
|
final List<String> usernames = Arrays.asList(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME,
|
||||||
|
BeatsSystemUser.NAME, APMSystemUser.NAME);
|
||||||
|
for (String username : usernames) {
|
||||||
c.prepareSetEnabled(username, true).get();
|
c.prepareSetEnabled(username, true).get();
|
||||||
ClusterHealthResponse response = client()
|
ClusterHealthResponse response = client()
|
||||||
.filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword())))
|
.filterWithHeader(singletonMap("Authorization", basicAuthHeaderValue(username, getReservedPassword())))
|
||||||
|
@ -81,7 +87,8 @@ public class ReservedRealmIntegTests extends NativeRealmIntegTestCase {
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testChangingPassword() {
|
public void testChangingPassword() {
|
||||||
String username = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
|
String username = randomFrom(ElasticUser.NAME, KibanaUser.NAME, LogstashSystemUser.NAME,
|
||||||
|
BeatsSystemUser.NAME, APMSystemUser.NAME);
|
||||||
final char[] newPassword = "supersecretvalue".toCharArray();
|
final char[] newPassword = "supersecretvalue".toCharArray();
|
||||||
|
|
||||||
if (randomBoolean()) {
|
if (randomBoolean()) {
|
||||||
|
|
|
@ -21,6 +21,7 @@ import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
|
||||||
import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm;
|
import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm;
|
||||||
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
import org.elasticsearch.xpack.core.security.authc.support.Hasher;
|
||||||
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
|
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
|
||||||
|
import org.elasticsearch.xpack.core.security.user.APMSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
|
import org.elasticsearch.xpack.core.security.user.AnonymousUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
import org.elasticsearch.xpack.core.security.user.BeatsSystemUser;
|
||||||
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
import org.elasticsearch.xpack.core.security.user.ElasticUser;
|
||||||
|
@ -262,7 +263,8 @@ public class ReservedRealmTests extends ESTestCase {
|
||||||
PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>();
|
PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>();
|
||||||
reservedRealm.users(userFuture);
|
reservedRealm.users(userFuture);
|
||||||
assertThat(userFuture.actionGet(),
|
assertThat(userFuture.actionGet(),
|
||||||
containsInAnyOrder(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true), new BeatsSystemUser(true)));
|
containsInAnyOrder(new ElasticUser(true), new KibanaUser(true), new LogstashSystemUser(true),
|
||||||
|
new BeatsSystemUser(true), new APMSystemUser((true))));
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testGetUsersDisabled() {
|
public void testGetUsersDisabled() {
|
||||||
|
@ -394,7 +396,7 @@ public class ReservedRealmTests extends ESTestCase {
|
||||||
new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
|
new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
|
||||||
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
|
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
|
||||||
|
|
||||||
final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
|
final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME);
|
||||||
doAnswer((i) -> {
|
doAnswer((i) -> {
|
||||||
ActionListener callback = (ActionListener) i.getArguments()[1];
|
ActionListener callback = (ActionListener) i.getArguments()[1];
|
||||||
callback.onResponse(null);
|
callback.onResponse(null);
|
||||||
|
@ -416,14 +418,15 @@ public class ReservedRealmTests extends ESTestCase {
|
||||||
new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
|
new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
|
||||||
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
|
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
|
||||||
|
|
||||||
final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
|
final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME, APMSystemUser.NAME);
|
||||||
reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, mockSecureSettings.getString("bootstrap.password")), listener);
|
reservedRealm.doAuthenticate(new UsernamePasswordToken(principal, mockSecureSettings.getString("bootstrap.password")), listener);
|
||||||
final AuthenticationResult result = listener.get();
|
final AuthenticationResult result = listener.get();
|
||||||
assertThat(result.getStatus(), is(AuthenticationResult.Status.TERMINATE));
|
assertThat(result.getStatus(), is(AuthenticationResult.Status.TERMINATE));
|
||||||
}
|
}
|
||||||
|
|
||||||
private User randomReservedUser(boolean enabled) {
|
private User randomReservedUser(boolean enabled) {
|
||||||
return randomFrom(new ElasticUser(enabled), new KibanaUser(enabled), new LogstashSystemUser(enabled), new BeatsSystemUser(enabled));
|
return randomFrom(new ElasticUser(enabled), new KibanaUser(enabled), new LogstashSystemUser(enabled),
|
||||||
|
new BeatsSystemUser(enabled), new APMSystemUser(enabled));
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
@ -452,6 +455,10 @@ public class ReservedRealmTests extends ESTestCase {
|
||||||
assertThat(versionPredicate.test(Version.V_6_2_3), is(false));
|
assertThat(versionPredicate.test(Version.V_6_2_3), is(false));
|
||||||
assertThat(versionPredicate.test(Version.V_6_3_0), is(true));
|
assertThat(versionPredicate.test(Version.V_6_3_0), is(true));
|
||||||
break;
|
break;
|
||||||
|
case APMSystemUser.NAME:
|
||||||
|
assertThat(versionPredicate.test(Version.V_6_4_0), is(false));
|
||||||
|
assertThat(versionPredicate.test(Version.V_6_5_0), is(true));
|
||||||
|
break;
|
||||||
default:
|
default:
|
||||||
assertThat(versionPredicate.test(Version.V_6_3_0), is(true));
|
assertThat(versionPredicate.test(Version.V_6_3_0), is(true));
|
||||||
break;
|
break;
|
||||||
|
|
|
@ -25,7 +25,8 @@ public class GroupByColumnKey extends GroupByKey {
     public TermsValuesSourceBuilder asValueSource() {
         return new TermsValuesSourceBuilder(id())
                 .field(fieldName())
-                .order(direction().asOrder());
+                .order(direction().asOrder())
+                .missingBucket(true);
     }
 
     @Override
@ -44,7 +44,8 @@ public class GroupByDateKey extends GroupByKey {
         return new DateHistogramValuesSourceBuilder(id())
                 .field(fieldName())
                 .dateHistogramInterval(new DateHistogramInterval(interval))
-                .timeZone(DateTimeZone.forTimeZone(timeZone));
+                .timeZone(DateTimeZone.forTimeZone(timeZone))
+                .missingBucket(true);
     }
 
     @Override
@ -36,7 +36,8 @@ public class GroupByScriptKey extends GroupByKey {
     public TermsValuesSourceBuilder asValueSource() {
         TermsValuesSourceBuilder builder = new TermsValuesSourceBuilder(id())
                 .script(script.toPainless())
-                .order(direction().asOrder());
+                .order(direction().asOrder())
+                .missingBucket(true);
 
         if (script.outputType().isNumeric()) {
             builder.valueType(ValueType.NUMBER);
|
|
|
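The three GroupBy*Key hunks above all add .missingBucket(true) so rows with no value for the grouped field come back in an explicit null bucket instead of being dropped by the composite aggregation. A hedged sketch of building such a source directly with the composite aggregation builders follows; the builder names are taken from the diff, while wrapping them in a CompositeAggregationBuilder and the "gender"/"groups" names are assumptions for illustration only.

import java.util.Collections;

import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;

class CompositeWithMissingBucket {
    // Sketch: group on "gender" but keep documents that have no gender value;
    // they appear as a bucket whose key for this source is null.
    static CompositeAggregationBuilder genderGroups() {
        TermsValuesSourceBuilder gender = new TermsValuesSourceBuilder("gender")
                .field("gender")
                .missingBucket(true);   // without this, documents missing the field are skipped
        return new CompositeAggregationBuilder("groups", Collections.singletonList(gender));
    }
}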
@ -24,8 +24,6 @@ import org.apache.http.client.utils.URLEncodedUtils;
|
||||||
import org.apache.http.cookie.Cookie;
|
import org.apache.http.cookie.Cookie;
|
||||||
import org.apache.http.cookie.CookieOrigin;
|
import org.apache.http.cookie.CookieOrigin;
|
||||||
import org.apache.http.cookie.MalformedCookieException;
|
import org.apache.http.cookie.MalformedCookieException;
|
||||||
import org.apache.http.entity.ContentType;
|
|
||||||
import org.apache.http.entity.StringEntity;
|
|
||||||
import org.apache.http.impl.client.CloseableHttpClient;
|
import org.apache.http.impl.client.CloseableHttpClient;
|
||||||
import org.apache.http.impl.client.HttpClients;
|
import org.apache.http.impl.client.HttpClients;
|
||||||
import org.apache.http.impl.cookie.DefaultCookieSpec;
|
import org.apache.http.impl.cookie.DefaultCookieSpec;
|
||||||
|
@ -39,6 +37,8 @@ import org.apache.http.util.CharArrayBuffer;
|
||||||
import org.apache.logging.log4j.message.ParameterizedMessage;
|
import org.apache.logging.log4j.message.ParameterizedMessage;
|
||||||
import org.elasticsearch.ElasticsearchException;
|
import org.elasticsearch.ElasticsearchException;
|
||||||
import org.elasticsearch.cli.SuppressForbidden;
|
import org.elasticsearch.cli.SuppressForbidden;
|
||||||
|
import org.elasticsearch.client.Request;
|
||||||
|
import org.elasticsearch.client.RequestOptions;
|
||||||
import org.elasticsearch.client.Response;
|
import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.common.CheckedFunction;
|
import org.elasticsearch.common.CheckedFunction;
|
||||||
import org.elasticsearch.common.Strings;
|
import org.elasticsearch.common.Strings;
|
||||||
|
@ -85,7 +85,6 @@ import java.util.concurrent.ExecutorService;
|
||||||
import java.util.regex.Matcher;
|
import java.util.regex.Matcher;
|
||||||
import java.util.regex.Pattern;
|
import java.util.regex.Pattern;
|
||||||
|
|
||||||
import static java.util.Collections.emptyMap;
|
|
||||||
import static org.elasticsearch.common.xcontent.XContentHelper.convertToMap;
|
import static org.elasticsearch.common.xcontent.XContentHelper.convertToMap;
|
||||||
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
|
||||||
import static org.hamcrest.Matchers.contains;
|
import static org.hamcrest.Matchers.contains;
|
||||||
|
@ -176,9 +175,9 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
*/
|
*/
|
||||||
@Before
|
@Before
|
||||||
public void setKibanaPassword() throws IOException {
|
public void setKibanaPassword() throws IOException {
|
||||||
final HttpEntity json = new StringEntity("{ \"password\" : \"" + KIBANA_PASSWORD + "\" }", ContentType.APPLICATION_JSON);
|
Request request = new Request("PUT", "/_xpack/security/user/kibana/_password");
|
||||||
final Response response = adminClient().performRequest("PUT", "/_xpack/security/user/kibana/_password", emptyMap(), json);
|
request.setJsonEntity("{ \"password\" : \"" + KIBANA_PASSWORD + "\" }");
|
||||||
assertOK(response);
|
adminClient().performRequest(request);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -188,8 +187,8 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
*/
|
*/
|
||||||
@Before
|
@Before
|
||||||
public void setupRoleMapping() throws IOException {
|
public void setupRoleMapping() throws IOException {
|
||||||
final StringEntity json = new StringEntity(Strings // top-level
|
Request request = new Request("PUT", "/_xpack/security/role_mapping/thor-kibana");
|
||||||
.toString(XContentBuilder.builder(XContentType.JSON.xContent())
|
request.setJsonEntity(Strings.toString(XContentBuilder.builder(XContentType.JSON.xContent())
|
||||||
.startObject()
|
.startObject()
|
||||||
.array("roles", new String[] { "kibana_user"} )
|
.array("roles", new String[] { "kibana_user"} )
|
||||||
.field("enabled", true)
|
.field("enabled", true)
|
||||||
|
@ -199,10 +198,8 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
.startObject().startObject("field").field("realm.name", "shibboleth").endObject().endObject()
|
.startObject().startObject("field").field("realm.name", "shibboleth").endObject().endObject()
|
||||||
.endArray() // "all"
|
.endArray() // "all"
|
||||||
.endObject() // "rules"
|
.endObject() // "rules"
|
||||||
.endObject()), ContentType.APPLICATION_JSON);
|
.endObject()));
|
||||||
|
adminClient().performRequest(request);
|
||||||
final Response response = adminClient().performRequest("PUT", "/_xpack/security/role_mapping/thor-kibana", emptyMap(), json);
|
|
||||||
assertOK(response);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -251,10 +248,11 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
* is for the expected user with the expected name and roles.
|
* is for the expected user with the expected name and roles.
|
||||||
*/
|
*/
|
||||||
private void verifyElasticsearchAccessToken(String accessToken) throws IOException {
|
private void verifyElasticsearchAccessToken(String accessToken) throws IOException {
|
||||||
final BasicHeader authorization = new BasicHeader("Authorization", "Bearer " + accessToken);
|
Request request = new Request("GET", "/_xpack/security/_authenticate");
|
||||||
final Response response = client().performRequest("GET", "/_xpack/security/_authenticate", authorization);
|
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||||
assertOK(response);
|
options.addHeader("Authorization", "Bearer " + accessToken);
|
||||||
final Map<String, Object> map = parseResponseAsMap(response.getEntity());
|
request.setOptions(options);
|
||||||
|
final Map<String, Object> map = entityAsMap(client().performRequest(request));
|
||||||
assertThat(map.get("username"), equalTo("thor"));
|
assertThat(map.get("username"), equalTo("thor"));
|
||||||
assertThat(map.get("full_name"), equalTo("Thor Odinson"));
|
assertThat(map.get("full_name"), equalTo("Thor Odinson"));
|
||||||
assertSingletonList(map.get("roles"), "kibana_user");
|
assertSingletonList(map.get("roles"), "kibana_user");
|
||||||
|
@ -272,12 +270,11 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
* can be used to get a new valid access token and refresh token.
|
* can be used to get a new valid access token and refresh token.
|
||||||
*/
|
*/
|
||||||
private void verifyElasticsearchRefreshToken(String refreshToken) throws IOException {
|
private void verifyElasticsearchRefreshToken(String refreshToken) throws IOException {
|
||||||
final String body = "{ \"grant_type\":\"refresh_token\", \"refresh_token\":\"" + refreshToken + "\" }";
|
Request request = new Request("POST", "/_xpack/security/oauth2/token");
|
||||||
final Response response = client().performRequest("POST", "/_xpack/security/oauth2/token",
|
request.setJsonEntity("{ \"grant_type\":\"refresh_token\", \"refresh_token\":\"" + refreshToken + "\" }");
|
||||||
emptyMap(), new StringEntity(body, ContentType.APPLICATION_JSON), kibanaAuth());
|
kibanaAuth(request);
|
||||||
assertOK(response);
|
|
||||||
|
|
||||||
final Map<String, Object> result = parseResponseAsMap(response.getEntity());
|
final Map<String, Object> result = entityAsMap(client().performRequest(request));
|
||||||
final Object newRefreshToken = result.get("refresh_token");
|
final Object newRefreshToken = result.get("refresh_token");
|
||||||
assertThat(newRefreshToken, notNullValue());
|
assertThat(newRefreshToken, notNullValue());
|
||||||
assertThat(newRefreshToken, instanceOf(String.class));
|
assertThat(newRefreshToken, instanceOf(String.class));
|
||||||
|
@ -463,10 +460,10 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
* sends a redirect to that page.
|
* sends a redirect to that page.
|
||||||
*/
|
*/
|
||||||
private void httpLogin(HttpExchange http) throws IOException {
|
private void httpLogin(HttpExchange http) throws IOException {
|
||||||
final Response prepare = client().performRequest("POST", "/_xpack/security/saml/prepare",
|
Request request = new Request("POST", "/_xpack/security/saml/prepare");
|
||||||
emptyMap(), new StringEntity("{}", ContentType.APPLICATION_JSON), kibanaAuth());
|
request.setJsonEntity("{}");
|
||||||
assertOK(prepare);
|
kibanaAuth(request);
|
||||||
final Map<String, Object> body = parseResponseAsMap(prepare.getEntity());
|
final Map<String, Object> body = entityAsMap(client().performRequest(request));
|
||||||
logger.info("Created SAML authentication request {}", body);
|
logger.info("Created SAML authentication request {}", body);
|
||||||
http.getResponseHeaders().add("Set-Cookie", REQUEST_ID_COOKIE + "=" + body.get("id"));
|
http.getResponseHeaders().add("Set-Cookie", REQUEST_ID_COOKIE + "=" + body.get("id"));
|
||||||
http.getResponseHeaders().add("Location", (String) body.get("redirect"));
|
http.getResponseHeaders().add("Location", (String) body.get("redirect"));
|
||||||
|
@ -504,9 +501,10 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
final String id = getCookie(REQUEST_ID_COOKIE, http);
|
final String id = getCookie(REQUEST_ID_COOKIE, http);
|
||||||
assertThat(id, notNullValue());
|
assertThat(id, notNullValue());
|
||||||
|
|
||||||
final String body = "{ \"content\" : \"" + saml + "\", \"ids\": [\"" + id + "\"] }";
|
Request request = new Request("POST", "/_xpack/security/saml/authenticate");
|
||||||
return client().performRequest("POST", "/_xpack/security/saml/authenticate",
|
request.setJsonEntity("{ \"content\" : \"" + saml + "\", \"ids\": [\"" + id + "\"] }");
|
||||||
emptyMap(), new StringEntity(body, ContentType.APPLICATION_JSON), kibanaAuth());
|
kibanaAuth(request);
|
||||||
|
return client().performRequest(request);
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<NameValuePair> parseRequestForm(HttpExchange http) throws IOException {
|
private List<NameValuePair> parseRequestForm(HttpExchange http) throws IOException {
|
||||||
|
@ -542,9 +540,11 @@ public class SamlAuthenticationIT extends ESRestTestCase {
|
||||||
assertThat(((List<?>) value), contains(expectedElement));
|
assertThat(((List<?>) value), contains(expectedElement));
|
||||||
}
|
}
|
||||||
|
|
||||||
private static BasicHeader kibanaAuth() {
|
private static void kibanaAuth(Request request) {
|
||||||
final String auth = UsernamePasswordToken.basicAuthHeaderValue("kibana", new SecureString(KIBANA_PASSWORD.toCharArray()));
|
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||||
return new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, auth);
|
options.addHeader("Authorization",
|
||||||
|
UsernamePasswordToken.basicAuthHeaderValue("kibana", new SecureString(KIBANA_PASSWORD.toCharArray())));
|
||||||
|
request.setOptions(options);
|
||||||
}
|
}
|
||||||
|
|
||||||
private CloseableHttpClient getHttpClient() throws Exception {
|
private CloseableHttpClient getHttpClient() throws Exception {
|
||||||
|
|
|
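The SamlAuthenticationIT changes above (and the SetupPasswordToolIT changes that follow) migrate from the deprecated performRequest(method, endpoint, params, entity, headers) overloads to the low-level REST client's Request and RequestOptions API: the body goes through setJsonEntity and per-request headers through a RequestOptions.Builder. A minimal, self-contained sketch of that pattern; the endpoint, host and token value are placeholders, not taken from the tests.

import java.io.IOException;

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

class RequestOptionsExample {
    static Response authenticate(RestClient client, String bearerToken) throws IOException {
        // Build the request: method + endpoint first, then per-request options.
        Request request = new Request("GET", "/_xpack/security/_authenticate");
        RequestOptions.Builder options = request.getOptions().toBuilder();
        options.addHeader("Authorization", "Bearer " + bearerToken);
        request.setOptions(options);
        return client.performRequest(request);
    }

    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            System.out.println(authenticate(client, "<access token>").getStatusLine());
        }
    }
}

The kibanaAuth(Request) helper in the diff does exactly this header dance, just with a basic-auth value instead of a bearer token.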
@ -5,8 +5,9 @@
|
||||||
*/
|
*/
|
||||||
package org.elasticsearch.xpack.security.authc.esnative.tool;
|
package org.elasticsearch.xpack.security.authc.esnative.tool;
|
||||||
|
|
||||||
import org.apache.http.message.BasicHeader;
|
|
||||||
import org.elasticsearch.cli.MockTerminal;
|
import org.elasticsearch.cli.MockTerminal;
|
||||||
|
import org.elasticsearch.client.Request;
|
||||||
|
import org.elasticsearch.client.RequestOptions;
|
||||||
import org.elasticsearch.client.Response;
|
import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.common.Strings;
|
import org.elasticsearch.common.Strings;
|
||||||
import org.elasticsearch.common.SuppressForbidden;
|
import org.elasticsearch.common.SuppressForbidden;
|
||||||
|
@ -52,7 +53,7 @@ public class SetupPasswordToolIT extends ESRestTestCase {
|
||||||
final Path configPath = PathUtils.get(testConfigDir);
|
final Path configPath = PathUtils.get(testConfigDir);
|
||||||
setSystemPropsForTool(configPath);
|
setSystemPropsForTool(configPath);
|
||||||
|
|
||||||
Response nodesResponse = client().performRequest("GET", "/_nodes/http");
|
Response nodesResponse = client().performRequest(new Request("GET", "/_nodes/http"));
|
||||||
Map<String, Object> nodesMap = entityAsMap(nodesResponse);
|
Map<String, Object> nodesMap = entityAsMap(nodesResponse);
|
||||||
|
|
||||||
Map<String,Object> nodes = (Map<String,Object>) nodesMap.get("nodes");
|
Map<String,Object> nodes = (Map<String,Object>) nodesMap.get("nodes");
|
||||||
|
@ -97,15 +98,16 @@ public class SetupPasswordToolIT extends ESRestTestCase {
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
assertEquals(4, userPasswordMap.size());
|
assertEquals(5, userPasswordMap.size());
|
||||||
userPasswordMap.entrySet().forEach(entry -> {
|
userPasswordMap.entrySet().forEach(entry -> {
|
||||||
final String basicHeader = "Basic " +
|
final String basicHeader = "Basic " +
|
||||||
Base64.getEncoder().encodeToString((entry.getKey() + ":" + entry.getValue()).getBytes(StandardCharsets.UTF_8));
|
Base64.getEncoder().encodeToString((entry.getKey() + ":" + entry.getValue()).getBytes(StandardCharsets.UTF_8));
|
||||||
try {
|
try {
|
||||||
Response authenticateResponse = client().performRequest("GET", "/_xpack/security/_authenticate",
|
Request request = new Request("GET", "/_xpack/security/_authenticate");
|
||||||
new BasicHeader("Authorization", basicHeader));
|
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||||
assertEquals(200, authenticateResponse.getStatusLine().getStatusCode());
|
options.addHeader("Authorization", basicHeader);
|
||||||
Map<String, Object> userInfoMap = entityAsMap(authenticateResponse);
|
request.setOptions(options);
|
||||||
|
Map<String, Object> userInfoMap = entityAsMap(client().performRequest(request));
|
||||||
assertEquals(entry.getKey(), userInfoMap.get("username"));
|
assertEquals(entry.getKey(), userInfoMap.get("username"));
|
||||||
} catch (IOException e) {
|
} catch (IOException e) {
|
||||||
throw new UncheckedIOException(e);
|
throw new UncheckedIOException(e);
|
||||||
|
|
|
@ -42,14 +42,15 @@ public class DataLoader {
|
||||||
}
|
}
|
||||||
|
|
||||||
protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception {
|
protected static void loadEmpDatasetIntoEs(RestClient client) throws Exception {
|
||||||
loadEmpDatasetIntoEs(client, "test_emp");
|
loadEmpDatasetIntoEs(client, "test_emp", "employees");
|
||||||
loadEmpDatasetIntoEs(client, "test_emp_copy");
|
loadEmpDatasetIntoEs(client, "test_emp_copy", "employees");
|
||||||
|
loadEmpDatasetIntoEs(client, "test_emp_with_nulls", "employees_with_nulls");
|
||||||
makeAlias(client, "test_alias", "test_emp", "test_emp_copy");
|
makeAlias(client, "test_alias", "test_emp", "test_emp_copy");
|
||||||
makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy");
|
makeAlias(client, "test_alias_emp", "test_emp", "test_emp_copy");
|
||||||
}
|
}
|
||||||
|
|
||||||
public static void loadDocsDatasetIntoEs(RestClient client) throws Exception {
|
public static void loadDocsDatasetIntoEs(RestClient client) throws Exception {
|
||||||
loadEmpDatasetIntoEs(client, "emp");
|
loadEmpDatasetIntoEs(client, "emp", "employees");
|
||||||
loadLibDatasetIntoEs(client, "library");
|
loadLibDatasetIntoEs(client, "library");
|
||||||
makeAlias(client, "employees", "emp");
|
makeAlias(client, "employees", "emp");
|
||||||
}
|
}
|
||||||
|
@ -62,7 +63,7 @@ public class DataLoader {
|
||||||
.endObject();
|
.endObject();
|
||||||
}
|
}
|
||||||
|
|
||||||
protected static void loadEmpDatasetIntoEs(RestClient client, String index) throws Exception {
|
protected static void loadEmpDatasetIntoEs(RestClient client, String index, String fileName) throws Exception {
|
||||||
Request request = new Request("PUT", "/" + index);
|
Request request = new Request("PUT", "/" + index);
|
||||||
XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
|
XContentBuilder createIndex = JsonXContent.contentBuilder().startObject();
|
||||||
createIndex.startObject("settings");
|
createIndex.startObject("settings");
|
||||||
|
@ -129,16 +130,19 @@ public class DataLoader {
|
||||||
request = new Request("POST", "/" + index + "/emp/_bulk");
|
request = new Request("POST", "/" + index + "/emp/_bulk");
|
||||||
request.addParameter("refresh", "true");
|
request.addParameter("refresh", "true");
|
||||||
StringBuilder bulk = new StringBuilder();
|
StringBuilder bulk = new StringBuilder();
|
||||||
csvToLines("employees", (titles, fields) -> {
|
csvToLines(fileName, (titles, fields) -> {
|
||||||
bulk.append("{\"index\":{}}\n");
|
bulk.append("{\"index\":{}}\n");
|
||||||
bulk.append('{');
|
bulk.append('{');
|
||||||
String emp_no = fields.get(1);
|
String emp_no = fields.get(1);
|
||||||
for (int f = 0; f < fields.size(); f++) {
|
for (int f = 0; f < fields.size(); f++) {
|
||||||
|
// an empty value in the csv file is treated as 'null', thus skipping it in the bulk request
|
||||||
|
if (fields.get(f).trim().length() > 0) {
|
||||||
if (f != 0) {
|
if (f != 0) {
|
||||||
bulk.append(',');
|
bulk.append(',');
|
||||||
}
|
}
|
||||||
bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
|
bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
|
||||||
}
|
}
|
||||||
|
}
|
||||||
// append department
|
// append department
|
||||||
List<List<String>> list = dep_emp.get(emp_no);
|
List<List<String>> list = dep_emp.get(emp_no);
|
||||||
if (!list.isEmpty()) {
|
if (!list.isEmpty()) {
|
||||||
|
|
|
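The DataLoader hunk above makes an empty CSV cell behave as null by simply not emitting that field in the bulk request for the row. A small sketch of that filtering step follows, under the same assumptions: a hypothetical helper rather than the DataLoader method itself, using a first-field flag instead of the f != 0 check.

import java.util.Arrays;
import java.util.List;

class BulkLineBuilder {
    // Build one _bulk action + source line, skipping empty cells so that the
    // corresponding fields stay unset (i.e. null) in the indexed document.
    static String toBulkLine(List<String> titles, List<String> fields) {
        StringBuilder bulk = new StringBuilder("{\"index\":{}}\n{");
        boolean first = true;
        for (int f = 0; f < fields.size(); f++) {
            if (fields.get(f).trim().isEmpty()) {
                continue;                       // empty CSV value -> field omitted -> null
            }
            if (!first) {
                bulk.append(',');
            }
            first = false;
            bulk.append('"').append(titles.get(f)).append("\":\"").append(fields.get(f)).append('"');
        }
        return bulk.append("}\n").toString();
    }

    public static void main(String[] args) {
        System.out.println(toBulkLine(
                Arrays.asList("first_name", "gender", "salary"),
                Arrays.asList("Georgi", "", "57305")));
        // prints the action line followed by {"first_name":"Georgi","salary":"57305"}
    }
}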
@ -25,7 +25,10 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
     private String query;
 
     @ClassRule
-    public static LocalH2 H2 = new LocalH2((c) -> c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'"));
+    public static LocalH2 H2 = new LocalH2((c) -> {
+        c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp.sql'");
+        c.createStatement().execute("RUNSCRIPT FROM 'classpath:/setup_test_emp_with_nulls.sql'");
+    });
 
     @ParametersFactory(argumentFormatting = PARAM_FORMATTING)
     public static List<Object[]> readScriptSpec() throws Exception {
@ -39,6 +42,7 @@ public abstract class SqlSpecTestCase extends SpecBaseIntegrationTestCase {
         tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser));
         tests.addAll(readScriptSpec("/string-functions.sql-spec", parser));
         tests.addAll(readScriptSpec("/case-functions.sql-spec", parser));
+        tests.addAll(readScriptSpec("/agg_nulls.sql-spec", parser));
         return tests;
     }
 
@ -0,0 +1,14 @@
+selectGenderWithNullsAndGroupByGender
+SELECT gender, COUNT(*) count FROM test_emp_with_nulls GROUP BY gender ORDER BY gender;
+selectFirstNameWithNullsAndGroupByFirstName
+SELECT first_name FROM test_emp_with_nulls GROUP BY first_name ORDER BY first_name;
+selectCountWhereIsNull
+SELECT COUNT(*) count FROM test_emp_with_nulls WHERE first_name IS NULL;
+selectLanguagesCountWithNullsAndGroupByLanguage
+SELECT languages l, COUNT(*) c FROM test_emp_with_nulls GROUP BY languages ORDER BY languages;
+selectHireDateGroupByHireDate
+SELECT hire_date HD, COUNT(*) c FROM test_emp_with_nulls GROUP BY hire_date ORDER BY hire_date DESC;
+selectHireDateGroupByHireDate
+SELECT hire_date HD, COUNT(*) c FROM test_emp_with_nulls GROUP BY hire_date ORDER BY hire_date DESC;
+selectSalaryGroupBySalary
+SELECT salary, COUNT(*) c FROM test_emp_with_nulls GROUP BY salary ORDER BY salary DESC;
@ -86,6 +86,7 @@ test_alias | ALIAS
 test_alias_emp | ALIAS
 test_emp | BASE TABLE
 test_emp_copy | BASE TABLE
+test_emp_with_nulls | BASE TABLE
 ;
 
 testGroupByOnAlias
@ -98,10 +99,10 @@ F | 10099.28
 ;
 
 testGroupByOnPattern
-SELECT gender, PERCENTILE(emp_no, 97) p1 FROM test_* GROUP BY gender;
+SELECT gender, PERCENTILE(emp_no, 97) p1 FROM test_* WHERE gender is NOT NULL GROUP BY gender;
 
 gender:s | p1:d
 
-F | 10099.28
-M | 10095.75
+F | 10099.32
+M | 10095.98
 ;
|
@ -0,0 +1,101 @@
|
||||||
|
birth_date,emp_no,first_name,gender,hire_date,languages,last_name,salary
|
||||||
|
1953-09-02T00:00:00Z,10001,Georgi,,1986-06-26T00:00:00Z,2,Facello,57305
|
||||||
|
1964-06-02T00:00:00Z,10002,Bezalel,,1985-11-21T00:00:00Z,5,Simmel,56371
|
||||||
|
1959-12-03T00:00:00Z,10003,Parto,,1986-08-28T00:00:00Z,4,Bamford,61805
|
||||||
|
1954-05-01T00:00:00Z,10004,Chirstian,,1986-12-01T00:00:00Z,5,Koblick,36174
|
||||||
|
1955-01-21T00:00:00Z,10005,Kyoichi,,1989-09-12T00:00:00Z,1,Maliniak,63528
|
||||||
|
1953-04-20T00:00:00Z,10006,Anneke,,1989-06-02T00:00:00Z,3,Preusig,60335
|
||||||
|
1957-05-23T00:00:00Z,10007,Tzvetan,,1989-02-10T00:00:00Z,4,Zielinski,74572
|
||||||
|
1958-02-19T00:00:00Z,10008,Saniya,,1994-09-15T00:00:00Z,2,Kalloufi,43906
|
||||||
|
1952-04-19T00:00:00Z,10009,Sumant,,1985-02-18T00:00:00Z,1,Peac,66174
|
||||||
|
1963-06-01T00:00:00Z,10010,Duangkaew,,1989-08-24T00:00:00Z,4,Piveteau,45797
|
||||||
|
1953-11-07T00:00:00Z,10011,Mary,F,1990-01-22T00:00:00Z,5,Sluis,31120
|
||||||
|
1960-10-04T00:00:00Z,10012,Patricio,M,1992-12-18T00:00:00Z,5,Bridgland,48942
|
||||||
|
1963-06-07T00:00:00Z,10013,Eberhardt,M,1985-10-20T00:00:00Z,1,Terkki,48735
|
||||||
|
1956-02-12T00:00:00Z,10014,Berni,M,1987-03-11T00:00:00Z,5,Genin,37137
|
||||||
|
1959-08-19T00:00:00Z,10015,Guoxiang,M,1987-07-02T00:00:00Z,5,Nooteboom,25324
|
||||||
|
1961-05-02T00:00:00Z,10016,Kazuhito,M,1995-01-27T00:00:00Z,2,Cappelletti,61358
|
||||||
|
1958-07-06T00:00:00Z,10017,Cristinel,F,1993-08-03T00:00:00Z,2,Bouloucos,58715
|
||||||
|
1954-06-19T00:00:00Z,10018,Kazuhide,F,1993-08-03T00:00:00Z,2,Peha,56760
|
||||||
|
1953-01-23T00:00:00Z,10019,Lillian,M,1993-08-03T00:00:00Z,1,Haddadi,73717
|
||||||
|
1952-12-24T00:00:00Z,10020,,M,1991-01-26T00:00:00Z,3,Warwick,40031
|
||||||
|
1960-02-20T00:00:00Z,10021,,M,1989-12-17T00:00:00Z,5,Erde,60408
|
||||||
|
1952-07-08T00:00:00Z,10022,,M,1995-08-22T00:00:00Z,3,Famili,48233
|
||||||
|
1953-09-29T00:00:00Z,10023,,F,1989-12-17T00:00:00Z,2,Montemayor,47896
|
||||||
|
1958-09-05T00:00:00Z,10024,,F,1997-05-19T00:00:00Z,3,Pettey,64675
|
||||||
|
1958-10-31T00:00:00Z,10025,Prasadram,M,1987-08-17T00:00:00Z,5,Heyers,47411
|
||||||
|
1953-04-03T00:00:00Z,10026,Yongqiao,M,1995-03-20T00:00:00Z,3,Berztiss,28336
|
||||||
|
1962-07-10T00:00:00Z,10027,Divier,F,1989-07-07T00:00:00Z,5,Reistad,73851
|
||||||
|
1963-11-26T00:00:00Z,10028,Domenick,M,1991-10-22T00:00:00Z,1,Tempesti,39356
|
||||||
|
1956-12-13T00:00:00Z,10029,Otmar,M,1985-11-20T00:00:00Z,,Herbst,74999
|
||||||
|
1958-07-14T00:00:00Z,10030,Elvis,M,1994-02-17T00:00:00Z,,Demeyer,67492
|
||||||
|
1959-01-27T00:00:00Z,10031,Karsten,M,1994-02-17T00:00:00Z,,Joslin,37716
|
||||||
|
1960-08-09T00:00:00Z,10032,Jeong,F,1990-06-20T00:00:00Z,,Reistad,62233
|
||||||
|
1956-11-14T00:00:00Z,10033,Arif,M,1987-03-18T00:00:00Z,,Merlo,70011
|
||||||
|
1962-12-29T00:00:00Z,10034,Bader,M,1988-09-05T00:00:00Z,,Swan,39878
|
||||||
|
1953-02-08T00:00:00Z,10035,Alain,M,1988-09-05T00:00:00Z,,Chappelet,25945
|
||||||
|
1959-08-10T00:00:00Z,10036,Adamantios,M,1992-01-03T00:00:00Z,,Portugali,60781
|
||||||
|
1963-07-22T00:00:00Z,10037,Pradeep,M,1990-12-05T00:00:00Z,,Makrucki,37691
|
||||||
|
1960-07-20T00:00:00Z,10038,Huan,M,1989-09-20T00:00:00Z,,Lortz,35222
|
||||||
|
1959-10-01T00:00:00Z,10039,Alejandro,M,1988-01-19T00:00:00Z,,Brender,36051
|
||||||
|
1959-09-13T00:00:00Z,10040,Weiyi,F,1993-02-14T00:00:00Z,,Meriste,37112
|
||||||
|
1959-08-27T00:00:00Z,10041,Uri,F,1989-11-12T00:00:00Z,1,Lenart,56415
|
||||||
|
1956-02-26T00:00:00Z,10042,Magy,F,1993-03-21T00:00:00Z,3,Stamatiou,30404
1960-09-19T00:00:00Z,10043,Yishay,M,1990-10-20T00:00:00Z,1,Tzvieli,34341
1961-09-21T00:00:00Z,10044,Mingsen,F,1994-05-21T00:00:00Z,1,Casley,39728
1957-08-14T00:00:00Z,10045,Moss,M,1989-09-02T00:00:00Z,3,Shanbhogue,74970
1960-07-23T00:00:00Z,10046,Lucien,M,1992-06-20T00:00:00Z,4,Rosenbaum,50064
1952-06-29T00:00:00Z,10047,Zvonko,M,1989-03-31T00:00:00Z,4,Nyanchama,42716
1963-07-11T00:00:00Z,10048,Florian,M,1985-02-24T00:00:00Z,3,Syrotiuk,26436
1961-04-24T00:00:00Z,10049,Basil,F,1992-05-04T00:00:00Z,5,Tramer,37853
1958-05-21T00:00:00Z,10050,Yinghua,M,1990-12-25T00:00:00Z,2,Dredge,43026
1953-07-28T00:00:00Z,10051,Hidefumi,M,1992-10-15T00:00:00Z,3,Caine,58121
1961-02-26T00:00:00Z,10052,Heping,M,1988-05-21T00:00:00Z,1,Nitsch,55360
1954-09-13T00:00:00Z,10053,Sanjiv,F,1986-02-04T00:00:00Z,3,Zschoche,54462
1957-04-04T00:00:00Z,10054,Mayumi,M,1995-03-13T00:00:00Z,4,Schueller,65367
1956-06-06T00:00:00Z,10055,Georgy,M,1992-04-27T00:00:00Z,5,Dredge,49281
1961-09-01T00:00:00Z,10056,Brendon,F,1990-02-01T00:00:00Z,2,Bernini,33370
1954-05-30T00:00:00Z,10057,Ebbe,F,1992-01-15T00:00:00Z,4,Callaway,27215
1954-10-01T00:00:00Z,10058,Berhard,M,1987-04-13T00:00:00Z,3,McFarlin,38376
1953-09-19T00:00:00Z,10059,Alejandro,F,1991-06-26T00:00:00Z,2,McAlpine,44307
1961-10-15T00:00:00Z,10060,Breannda,M,1987-11-02T00:00:00Z,2,Billingsley,29175
1962-10-19T00:00:00Z,10061,Tse,M,1985-09-17T00:00:00Z,1,Herber,49095
1961-11-02T00:00:00Z,10062,Anoosh,M,1991-08-30T00:00:00Z,3,Peyn,65030
1952-08-06T00:00:00Z,10063,Gino,F,1989-04-08T00:00:00Z,3,Leonhardt,52121
1959-04-07T00:00:00Z,10064,Udi,M,1985-11-20T00:00:00Z,5,Jansch,33956
1963-04-14T00:00:00Z,10065,Satosi,M,1988-05-18T00:00:00Z,2,Awdeh,50249
1952-11-13T00:00:00Z,10066,Kwee,M,1986-02-26T00:00:00Z,5,Schusler,31897
1953-01-07T00:00:00Z,10067,Claudi,M,1987-03-04T00:00:00Z,2,Stavenow,52044
1962-11-26T00:00:00Z,10068,Charlene,M,1987-08-07T00:00:00Z,3,Brattka,28941
1960-09-06T00:00:00Z,10069,Margareta,F,1989-11-05T00:00:00Z,5,Bierman,41933
1955-08-20T00:00:00Z,10070,Reuven,M,1985-10-14T00:00:00Z,3,Garigliano,54329
1958-01-21T00:00:00Z,10071,Hisao,M,1987-10-01T00:00:00Z,2,Lipner,40612
1952-05-15T00:00:00Z,10072,Hironoby,F,1988-07-21T00:00:00Z,5,Sidou,54518
1954-02-23T00:00:00Z,10073,Shir,M,1991-12-01T00:00:00Z,4,McClurg,32568
1955-08-28T00:00:00Z,10074,Mokhtar,F,1990-08-13T00:00:00Z,5,Bernatsky,38992
1960-03-09T00:00:00Z,10075,Gao,F,1987-03-19T00:00:00Z,5,Dolinsky,51956
1952-06-13T00:00:00Z,10076,Erez,F,1985-07-09T00:00:00Z,3,Ritzmann,62405
1964-04-18T00:00:00Z,10077,Mona,M,1990-03-02T00:00:00Z,5,Azuma,46595
1959-12-25T00:00:00Z,10078,Danel,F,1987-05-26T00:00:00Z,2,Mondadori,69904
1961-10-05T00:00:00Z,10079,Kshitij,F,1986-03-27T00:00:00Z,2,Gils,32263
1957-12-03T00:00:00Z,10080,Premal,M,1985-11-19T00:00:00Z,5,Baek,52833
1960-12-17T00:00:00Z,10081,Zhongwei,M,1986-10-30T00:00:00Z,2,Rosen,50128
1963-09-09T00:00:00Z,10082,Parviz,M,1990-01-03T00:00:00Z,4,Lortz,49818
1959-07-23T00:00:00Z,10083,Vishv,M,1987-03-31T00:00:00Z,1,Zockler,
1960-05-25T00:00:00Z,10084,Tuval,M,1995-12-15T00:00:00Z,1,Kalloufi,
1962-11-07T00:00:00Z,10085,Kenroku,M,1994-04-09T00:00:00Z,5,Malabarba,
1962-11-19T00:00:00Z,10086,Somnath,M,1990-02-16T00:00:00Z,1,Foote,
1959-07-23T00:00:00Z,10087,Xinglin,F,1986-09-08T00:00:00Z,5,Eugenio,
1954-02-25T00:00:00Z,10088,Jungsoon,F,1988-09-02T00:00:00Z,5,Syrzycki,
1963-03-21T00:00:00Z,10089,Sudharsan,F,1986-08-12T00:00:00Z,4,Flasterstein,
1961-05-30T00:00:00Z,10090,Kendra,M,1986-03-14T00:00:00Z,2,Hofting,44956
1955-10-04T00:00:00Z,10091,Amabile,M,1992-11-18T00:00:00Z,3,Gomatam,38645
1964-10-18T00:00:00Z,10092,Valdiodio,F,1989-09-22T00:00:00Z,1,Niizuma,25976
1964-06-11T00:00:00Z,10093,Sailaja,M,1996-11-05T00:00:00Z,3,Desikan,45656
1957-05-25T00:00:00Z,10094,Arumugam,F,1987-04-18T00:00:00Z,5,Ossenbruggen,66817
1965-01-03T00:00:00Z,10095,Hilari,M,1986-07-15T00:00:00Z,4,Morton,37702
1954-09-16T00:00:00Z,10096,Jayson,M,1990-01-14T00:00:00Z,4,Mandell,43889
1952-02-27T00:00:00Z,10097,Remzi,M,1990-09-15T00:00:00Z,3,Waschkowski,71165
1961-09-23T00:00:00Z,10098,Sreekrishna,F,1985-05-13T00:00:00Z,4,Servieres,44817
1956-05-25T00:00:00Z,10099,Valter,F,1988-10-18T00:00:00Z,2,Sullins,73578
1953-04-21T00:00:00Z,10100,Hironobu,F,1987-09-21T00:00:00Z,4,Haraldson,68431
@ -0,0 +1,12 @@
DROP TABLE IF EXISTS "test_emp_with_nulls";
CREATE TABLE "test_emp_with_nulls" (
"birth_date" TIMESTAMP WITH TIME ZONE,
"emp_no" INT,
"first_name" VARCHAR(50),
"gender" VARCHAR(1),
"hire_date" TIMESTAMP WITH TIME ZONE,
"languages" TINYINT,
"last_name" VARCHAR(50),
"salary" INT
)
AS SELECT * FROM CSVREAD('classpath:/employees_with_nulls.csv');
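The statement above loads the classpath CSV fixture into an in-memory H2 table, so blank CSV cells (for example "languages" on employees 10029-10040) arrive as SQL NULLs. As a hedged illustration only (the query below and its missing_languages alias are not part of this commit), the load could be spot-checked with:

SELECT "gender", COUNT(*) AS missing_languages
FROM "test_emp_with_nulls"
WHERE "languages" IS NULL
GROUP BY "gender";

A non-zero count per gender confirms that the empty CSV cells were preserved as NULL rather than coerced to defaults.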