Mirror of https://github.com/honeymoose/OpenSearch.git (synced 2025-02-17 10:25:15 +00:00)
Merge branch 'master' into ccr
* master:
  Add proxy support to RemoteClusterConnection (#33062)
  TEST: Skip assertSeqNos for closed shards (#33130)
  TEST: resync operation on replica should acquire shard permit (#33103)
  Switch remaining x-pack tests to new style Requests (#33108)
  Switch remaining tests to new style Requests (#33109)
  Switch remaining ml tests to new style Requests (#33107)
  Build: Line up IDE detection logic
  Security index expands to a single replica (#33131)
  HLRC: request/response homogeneity and JavaDoc improvements (#33133)
  Checkstyle!
  [Test] Fix sporadic failure in MembershipActionTests
  Revert "Do NOT allow termvectors on nested fields (#32728)"
  [Rollup] Move toAggCap() methods out of rollup config objects (#32583)
  Fix race condition in scheduler engine test
Commit: 75304f405b
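The headline change in this merge is #33062, which adds a per-cluster proxy address for cross-cluster search connections. As a minimal sketch of how the new setting is meant to be wired up (the `search.remote.<alias>.seeds` key already existed; the `.proxy` key is the one introduced here; the alias `cluster_one` and both addresses are invented for illustration):

import org.elasticsearch.common.settings.Settings;

// Hypothetical node settings: seed the remote cluster as before, and additionally
// route every connection to it through a single proxy endpoint.
Settings remoteClusterSettings = Settings.builder()
        .putList("search.remote.cluster_one.seeds", "10.0.0.1:9300")   // invented seed address
        .put("search.remote.cluster_one.proxy", "proxy.internal:9400") // invented proxy endpoint
        .build();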
@@ -17,9 +17,6 @@ package org.elasticsearch.client;/*
* under the License.
*/

import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;

@@ -35,7 +32,6 @@ import org.elasticsearch.script.StoredScriptSource;

import java.util.Collections;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;

@@ -52,12 +48,9 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
// TODO: change to HighLevel PutStoredScriptRequest when it will be ready
// so far - using low-level REST API
Response putResponse =
adminClient()
.performRequest("PUT", "/_scripts/calculate-score", emptyMap(),
new StringEntity("{\"script\":" + script + "}",
ContentType.APPLICATION_JSON));
assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
Request putRequest = new Request("PUT", "/_scripts/calculate-score");
putRequest.setJsonEntity("{\"script\":" + script + "}");
Response putResponse = adminClient().performRequest(putRequest);
assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));

GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score");

@@ -78,12 +71,9 @@ public class StoredScriptsIT extends ESRestHighLevelClientTestCase {
final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
// TODO: change to HighLevel PutStoredScriptRequest when it will be ready
// so far - using low-level REST API
Response putResponse =
adminClient()
.performRequest("PUT", "/_scripts/" + id, emptyMap(),
new StringEntity("{\"script\":" + script + "}",
ContentType.APPLICATION_JSON));
assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
Request putRequest = new Request("PUT", "/_scripts/" + id);
putRequest.setJsonEntity("{\"script\":" + script + "}");
Response putResponse = adminClient().performRequest(putRequest);
assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));

DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest(id);
@@ -17,8 +17,6 @@ package org.elasticsearch.client.documentation;/*
* under the License.
*/

import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;

@@ -27,6 +25,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptReque
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;

@@ -43,7 +42,6 @@ import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;

@@ -193,11 +191,9 @@ public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase
final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
// TODO: change to HighLevel PutStoredScriptRequest when it will be ready
// so far - using low-level REST API
Response putResponse =
adminClient()
.performRequest("PUT", "/_scripts/" + id, emptyMap(),
new StringEntity("{\"script\":" + script + "}",
ContentType.APPLICATION_JSON));
Request request = new Request("PUT", "/_scripts/" + id);
request.setJsonEntity("{\"script\":" + script + "}");
Response putResponse = adminClient().performRequest(request);
assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
}
@@ -53,7 +53,7 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase {

@After
public void cleanupIndex() throws IOException {
client().performRequest("DELETE", indexName());
client().performRequest(new Request("DELETE", indexName()));
}

private String indexName() {
@@ -30,10 +30,6 @@ in similar way to the <<query-dsl-multi-match-query,multi match query>>
[WARNING]
Note that the usage of `/_termvector` is deprecated in 2.0, and replaced by `/_termvectors`.

[WARNING]
Term Vectors API doesn't work on nested fields. `/_termvectors` on a nested
field and any sub-fields of a nested field returns empty results.

[float]
=== Return values
@@ -1,49 +0,0 @@
setup:
  - do:
      indices.create:
        index: testidx
        body:
          mappings:
            _doc:
              properties:
                nested1:
                  type : nested
                  properties:
                    nested1-text:
                      type: text
                object1:
                  properties:
                    object1-text:
                      type: text
                    object1-nested1:
                      type: nested
                      properties:
                        object1-nested1-text:
                          type: text
  - do:
      index:
        index: testidx
        type: _doc
        id: 1
        body:
          "nested1" : [{ "nested1-text": "text1" }]
          "object1" : [{ "object1-text": "text2" }, "object1-nested1" : [{"object1-nested1-text" : "text3"}]]

  - do:
      indices.refresh: {}

---
"Termvectors on nested fields should return empty results":

  - do:
      termvectors:
        index: testidx
        type: _doc
        id: 1
        fields: ["nested1", "nested1.nested1-text", "object1.object1-nested1", "object1.object1-nested1.object1-nested1-text", "object1.object1-text"]

  - is_false: term_vectors.nested1
  - is_false: term_vectors.nested1\.nested1-text # escaping as the field name contains dot
  - is_false: term_vectors.object1\.object1-nested1
  - is_false: term_vectors.object1\.object1-nested1\.object1-nested1-text
  - is_true: term_vectors.object1\.object1-text
@@ -272,6 +272,7 @@ public final class ClusterSettings extends AbstractScopedSettings {
ElectMasterService.DISCOVERY_ZEN_MINIMUM_MASTER_NODES_SETTING,
TransportSearchAction.SHARD_COUNT_LIMIT_SETTING,
RemoteClusterAware.REMOTE_CLUSTERS_SEEDS,
RemoteClusterAware.REMOTE_CLUSTERS_PROXY,
RemoteClusterService.REMOTE_CLUSTER_SKIP_UNAVAILABLE,
RemoteClusterService.REMOTE_CONNECTIONS_PER_CLUSTER,
RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING,
@@ -1009,6 +1009,10 @@ public class Setting<T> implements ToXContentObject {
return new Setting<>(key, s -> "", Function.identity(), properties);
}

public static Setting<String> simpleString(String key, Function<String, String> parser, Property... properties) {
return new Setting<>(key, s -> "", parser, properties);
}

public static Setting<String> simpleString(String key, Setting<String> fallback, Property... properties) {
return new Setting<>(key, fallback, Function.identity(), properties);
}
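The new simpleString overload above accepts a parser so that a plain string setting can validate or normalize its value; REMOTE_CLUSTERS_PROXY later in this commit uses it to check the port. A minimal usage sketch, with an invented setting key and validation rule:

import org.elasticsearch.common.settings.Setting;

// Hypothetical setting built with the new overload: non-empty values must look like host:port.
public final class ExampleProxySetting {
    public static final Setting<String> EXAMPLE_ENDPOINT = Setting.simpleString(
            "example.endpoint",
            value -> {
                if (value.isEmpty() == false && value.indexOf(':') < 0) {
                    throw new IllegalArgumentException("expected [host:port] but got [" + value + "]");
                }
                return value; // the parser returns the accepted value unchanged
            },
            Setting.Property.NodeScope, Setting.Property.Dynamic);
}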
@@ -45,7 +45,6 @@ import org.elasticsearch.index.mapper.DocumentMapperForType;
import org.elasticsearch.index.mapper.KeywordFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceFieldMapper;

@@ -161,7 +160,7 @@ public class TermVectorsService {
request.selectedFields(fieldNames.toArray(Strings.EMPTY_ARRAY));
}

private static boolean isValidField(MappedFieldType fieldType, IndexShard indexShard) {
private static boolean isValidField(MappedFieldType fieldType) {
// must be a string
if (fieldType instanceof StringFieldType == false) {
return false;

@@ -170,16 +169,6 @@ public class TermVectorsService {
if (fieldType.indexOptions() == IndexOptions.NONE) {
return false;
}
// and must not be under nested field
int dotIndex = fieldType.name().indexOf('.');
while (dotIndex > -1) {
String parentField = fieldType.name().substring(0, dotIndex);
ObjectMapper mapper = indexShard.mapperService().getObjectMapper(parentField);
if (mapper != null && mapper.nested().isNested()) {
return false;
}
dotIndex = fieldType.name().indexOf('.', dotIndex + 1);
}
return true;
}

@@ -188,7 +177,7 @@ public class TermVectorsService {
Set<String> validFields = new HashSet<>();
for (String field : selectedFields) {
MappedFieldType fieldType = indexShard.mapperService().fullName(field);
if (isValidField(fieldType, indexShard) == false) {
if (!isValidField(fieldType)) {
continue;
}
// already retrieved, only if the analyzer hasn't been overridden at the field

@@ -295,7 +284,7 @@ public class TermVectorsService {
Collection<DocumentField> documentFields = new HashSet<>();
for (IndexableField field : doc.getFields()) {
MappedFieldType fieldType = indexShard.mapperService().fullName(field.name());
if (isValidField(fieldType, indexShard) == false) {
if (!isValidField(fieldType)) {
continue;
}
if (request.selectedFields() != null && !request.selectedFields().contains(field.name())) {
@@ -18,10 +18,14 @@
*/
package org.elasticsearch.transport;

import java.util.EnumSet;
import java.util.function.Supplier;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.ClusterNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;

@@ -66,6 +70,22 @@ public abstract class RemoteClusterAware extends AbstractComponent {
public static final char REMOTE_CLUSTER_INDEX_SEPARATOR = ':';
public static final String LOCAL_CLUSTER_GROUP_KEY = "";

/**
* A proxy address for the remote cluster.
* NOTE: this settings is undocumented until we have at last one transport that supports passing
* on the hostname via a mechanism like SNI.
*/
public static final Setting.AffixSetting<String> REMOTE_CLUSTERS_PROXY = Setting.affixKeySetting(
"search.remote.",
"proxy",
key -> Setting.simpleString(key, s -> {
if (Strings.hasLength(s)) {
parsePort(s);
}
return s;
}, Setting.Property.NodeScope, Setting.Property.Dynamic), REMOTE_CLUSTERS_SEEDS);

protected final ClusterNameExpressionResolver clusterNameResolver;

/**

@@ -77,25 +97,42 @@ public abstract class RemoteClusterAware extends AbstractComponent {
this.clusterNameResolver = new ClusterNameExpressionResolver(settings);
}

protected static Map<String, List<Supplier<DiscoveryNode>>> buildRemoteClustersSeeds(Settings settings) {
/**
* Builds the dynamic per-cluster config from the given settings. This is a map keyed by the cluster alias that points to a tuple
* (ProxyAddresss, [SeedNodeSuppliers]). If a cluster is configured with a proxy address all seed nodes will point to
* {@link TransportAddress#META_ADDRESS} and their configured address will be used as the hostname for the generated discovery node.
*/
protected static Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> buildRemoteClustersDynamicConfig(Settings settings) {
Stream<Setting<List<String>>> allConcreteSettings = REMOTE_CLUSTERS_SEEDS.getAllConcreteSettings(settings);
return allConcreteSettings.collect(
Collectors.toMap(REMOTE_CLUSTERS_SEEDS::getNamespace, concreteSetting -> {
String clusterName = REMOTE_CLUSTERS_SEEDS.getNamespace(concreteSetting);
List<String> addresses = concreteSetting.get(settings);
final boolean proxyMode = REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).exists(settings);
List<Supplier<DiscoveryNode>> nodes = new ArrayList<>(addresses.size());
for (String address : addresses) {
nodes.add(() -> {
TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address));
return new DiscoveryNode(clusterName + "#" + transportAddress.toString(),
transportAddress,
Version.CURRENT.minimumCompatibilityVersion());
});
nodes.add(() -> buildSeedNode(clusterName, address, proxyMode));
}
return nodes;
return new Tuple<>(REMOTE_CLUSTERS_PROXY.getConcreteSettingForNamespace(clusterName).get(settings), nodes);
}));
}

static DiscoveryNode buildSeedNode(String clusterName, String address, boolean proxyMode) {
if (proxyMode) {
TransportAddress transportAddress = new TransportAddress(TransportAddress.META_ADDRESS, 0);
String hostName = address.substring(0, indexOfPortSeparator(address));
return new DiscoveryNode("", clusterName + "#" + address, UUIDs.randomBase64UUID(), hostName, address,
transportAddress, Collections
.emptyMap(), EnumSet.allOf(DiscoveryNode.Role.class),
Version.CURRENT.minimumCompatibilityVersion());
} else {
TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address));
return new DiscoveryNode(clusterName + "#" + transportAddress.toString(),
transportAddress,
Version.CURRENT.minimumCompatibilityVersion());
}
}

/**
* Groups indices per cluster by splitting remote cluster-alias, index-name pairs on {@link #REMOTE_CLUSTER_INDEX_SEPARATOR}. All
* indices per cluster are collected as a list in the returned map keyed by the cluster alias. Local indices are grouped under

@@ -138,20 +175,24 @@ public abstract class RemoteClusterAware extends AbstractComponent {

protected abstract Set<String> getRemoteClusterNames();

/**
* Subclasses must implement this to receive information about updated cluster aliases. If the given address list is
* empty the cluster alias is unregistered and should be removed.
*/
protected abstract void updateRemoteCluster(String clusterAlias, List<String> addresses);
protected abstract void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxy);

/**
* Registers this instance to listen to updates on the cluster settings.
*/
public void listenForUpdates(ClusterSettings clusterSettings) {
clusterSettings.addAffixUpdateConsumer(RemoteClusterAware.REMOTE_CLUSTERS_SEEDS, this::updateRemoteCluster,
clusterSettings.addAffixUpdateConsumer(RemoteClusterAware.REMOTE_CLUSTERS_PROXY,
RemoteClusterAware.REMOTE_CLUSTERS_SEEDS,
(key, value) -> updateRemoteCluster(key, value.v2(), value.v1()),
(namespace, value) -> {});
}

protected static InetSocketAddress parseSeedAddress(String remoteHost) {
String host = remoteHost.substring(0, indexOfPortSeparator(remoteHost));
InetAddress hostAddress;
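How buildRemoteClustersDynamicConfig and buildSeedNode treat a proxied cluster is easiest to read off the settings that drive them; the sketch below restates what testBuildRemoteClustersDynamicConfig later in this commit asserts (the boom alias and both addresses are the test's own fixtures):

import org.elasticsearch.common.settings.Settings;

// For a cluster with a proxy, the generated seed node carries a placeholder transport
// address (TransportAddress.META_ADDRESS, port 0) and keeps the configured seed address
// as its host name, so the real hostname can later be passed on via an SNI-like mechanism.
Settings proxied = Settings.builder()
        .put("search.remote.boom.seeds", "boom-node1.internal:1000")
        .put("search.remote.boom.proxy", "foo.bar.com:1234")
        .build();
// buildRemoteClustersDynamicConfig(proxied) then maps "boom" to the tuple
// ("foo.bar.com:1234", [supplier of a node with id "boom#boom-node1.internal:1000"
//  and host name "boom-node1.internal"]).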
@@ -18,6 +18,7 @@
*/
package org.elasticsearch.transport;

import java.net.InetSocketAddress;
import java.util.function.Supplier;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.lucene.store.AlreadyClosedException;

@@ -88,6 +89,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
private final int maxNumRemoteConnections;
private final Predicate<DiscoveryNode> nodePredicate;
private final ThreadPool threadPool;
private volatile String proxyAddress;
private volatile List<Supplier<DiscoveryNode>> seedNodes;
private volatile boolean skipUnavailable;
private final ConnectHandler connectHandler;

@@ -106,6 +108,13 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
TransportService transportService, ConnectionManager connectionManager, int maxNumRemoteConnections,
Predicate<DiscoveryNode> nodePredicate) {
this(settings, clusterAlias, seedNodes, transportService, connectionManager, maxNumRemoteConnections, nodePredicate, null);
}

RemoteClusterConnection(Settings settings, String clusterAlias, List<Supplier<DiscoveryNode>> seedNodes,
TransportService transportService, ConnectionManager connectionManager, int maxNumRemoteConnections, Predicate<DiscoveryNode>
nodePredicate,
String proxyAddress) {
super(settings);
this.transportService = transportService;
this.maxNumRemoteConnections = maxNumRemoteConnections;

@@ -130,13 +139,26 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
connectionManager.addListener(this);
// we register the transport service here as a listener to make sure we notify handlers on disconnect etc.
connectionManager.addListener(transportService);
this.proxyAddress = proxyAddress;
}

private static DiscoveryNode maybeAddProxyAddress(String proxyAddress, DiscoveryNode node) {
if (proxyAddress == null || proxyAddress.isEmpty()) {
return node;
} else {
// resovle proxy address lazy here
InetSocketAddress proxyInetAddress = RemoteClusterAware.parseSeedAddress(proxyAddress);
return new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), node.getHostName(), node
.getHostAddress(), new TransportAddress(proxyInetAddress), node.getAttributes(), node.getRoles(), node.getVersion());
}
}

/**
* Updates the list of seed nodes for this cluster connection
*/
synchronized void updateSeedNodes(List<Supplier<DiscoveryNode>> seedNodes, ActionListener<Void> connectListener) {
synchronized void updateSeedNodes(String proxyAddress, List<Supplier<DiscoveryNode>> seedNodes, ActionListener<Void> connectListener) {
this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes));
this.proxyAddress = proxyAddress;
connectHandler.connect(connectListener);
}

@@ -281,6 +303,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
return new ProxyConnection(connection, remoteClusterNode);
}

static final class ProxyConnection implements Transport.Connection {
private final Transport.Connection proxyConnection;
private final DiscoveryNode targetNode;

@@ -461,7 +484,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
try {
if (seedNodes.hasNext()) {
cancellableThreads.executeIO(() -> {
final DiscoveryNode seedNode = seedNodes.next().get();
final DiscoveryNode seedNode = maybeAddProxyAddress(proxyAddress, seedNodes.next().get());
final TransportService.HandshakeResponse handshakeResponse;
Transport.Connection connection = manager.openConnection(seedNode,
ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, null, null));

@@ -476,7 +499,7 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
throw ex;
}

final DiscoveryNode handshakeNode = handshakeResponse.getDiscoveryNode();
final DiscoveryNode handshakeNode = maybeAddProxyAddress(proxyAddress, handshakeResponse.getDiscoveryNode());
if (nodePredicate.test(handshakeNode) && connectedNodes.size() < maxNumRemoteConnections) {
manager.connectToNode(handshakeNode, remoteProfile, transportService.connectionValidator(handshakeNode));
if (remoteClusterName.get() == null) {

@@ -583,7 +606,8 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
cancellableThreads.executeIO(() -> {
DiscoveryNodes nodes = response.getState().nodes();
Iterable<DiscoveryNode> nodesIter = nodes.getNodes()::valuesIt;
for (DiscoveryNode node : nodesIter) {
for (DiscoveryNode n : nodesIter) {
DiscoveryNode node = maybeAddProxyAddress(proxyAddress, n);
if (nodePredicate.test(node) && connectedNodes.size() < maxNumRemoteConnections) {
try {
connectionManager.connectToNode(node, remoteProfile,

@@ -646,7 +670,8 @@ final class RemoteClusterConnection extends AbstractComponent implements Transpo
* Get the information about remote nodes to be rendered on {@code _remote/info} requests.
*/
public RemoteConnectionInfo getConnectionInfo() {
List<TransportAddress> seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect(Collectors.toList());
List<TransportAddress> seedNodeAddresses = seedNodes.stream().map(node -> node.get().getAddress()).collect
(Collectors.toList());
TimeValue initialConnectionTimeout = RemoteClusterService.REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings);
return new RemoteConnectionInfo(clusterAlias, seedNodeAddresses, maxNumRemoteConnections, connectedNodes.size(),
initialConnectionTimeout, skipUnavailable);
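A short, hedged sketch of what the proxy plumbing above amounts to for a caller such as RemoteClusterService (the alias and proxy address are invented, and the surrounding variables are assumed to be in scope; the two signatures are the ones added in this hunk):

// Construct the connection with an explicit proxy address (last argument); passing null
// preserves the old behaviour via the delegating constructor above.
RemoteClusterConnection connection = new RemoteClusterConnection(settings, "cluster_one", seedNodes,
        transportService, connectionManager, 3, node -> true, "proxy.internal:9400");

// Seed updates now carry the proxy address as well, so a dynamic settings change can move
// the cluster on or off a proxy without rebuilding the connection object.
connection.updateSeedNodes("proxy.internal:9400", seedNodes, ActionListener.wrap(r -> {}, e -> {}));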
@@ -31,10 +31,10 @@ import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.core.internal.io.IOUtils;

@@ -116,8 +116,8 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
* @param seeds a cluster alias to discovery node mapping representing the remote clusters seeds nodes
* @param connectionListener a listener invoked once every configured cluster has been connected to
*/
private synchronized void updateRemoteClusters(Map<String, List<Supplier<DiscoveryNode>>> seeds,
ActionListener<Void> connectionListener) {
private synchronized void updateRemoteClusters(Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds,
ActionListener<Void> connectionListener) {
if (seeds.containsKey(LOCAL_CLUSTER_GROUP_KEY)) {
throw new IllegalArgumentException("remote clusters must not have the empty string as its key");
}

@@ -127,9 +127,12 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
} else {
CountDown countDown = new CountDown(seeds.size());
remoteClusters.putAll(this.remoteClusters);
for (Map.Entry<String, List<Supplier<DiscoveryNode>>> entry : seeds.entrySet()) {
for (Map.Entry<String, Tuple<String, List<Supplier<DiscoveryNode>>>> entry : seeds.entrySet()) {
List<Supplier<DiscoveryNode>> seedList = entry.getValue().v2();
String proxyAddress = entry.getValue().v1();

RemoteClusterConnection remote = this.remoteClusters.get(entry.getKey());
if (entry.getValue().isEmpty()) { // with no seed nodes we just remove the connection
if (seedList.isEmpty()) { // with no seed nodes we just remove the connection
try {
IOUtils.close(remote);
} catch (IOException e) {

@@ -140,15 +143,15 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
}

if (remote == null) { // this is a new cluster we have to add a new representation
remote = new RemoteClusterConnection(settings, entry.getKey(), entry.getValue(), transportService,
remote = new RemoteClusterConnection(settings, entry.getKey(), seedList, transportService,
new ConnectionManager(settings, transportService.transport, transportService.threadPool), numRemoteConnections,
getNodePredicate(settings));
getNodePredicate(settings), proxyAddress);
remoteClusters.put(entry.getKey(), remote);
}

// now update the seed nodes no matter if it's new or already existing
RemoteClusterConnection finalRemote = remote;
remote.updateSeedNodes(entry.getValue(), ActionListener.wrap(
remote.updateSeedNodes(proxyAddress, seedList, ActionListener.wrap(
response -> {
if (countDown.countDown()) {
connectionListener.onResponse(response);

@@ -302,8 +305,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
@Override
public void listenForUpdates(ClusterSettings clusterSettings) {
super.listenForUpdates(clusterSettings);
clusterSettings.addAffixUpdateConsumer(REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable,
(clusterAlias, value) -> {});
clusterSettings.addAffixUpdateConsumer(REMOTE_CLUSTER_SKIP_UNAVAILABLE, this::updateSkipUnavailable, (alias, value) -> {});
}

synchronized void updateSkipUnavailable(String clusterAlias, Boolean skipUnavailable) {

@@ -313,22 +315,21 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
}
}

@Override
protected void updateRemoteCluster(String clusterAlias, List<String> addresses) {
updateRemoteCluster(clusterAlias, addresses, ActionListener.wrap((x) -> {}, (x) -> {}));
protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxyAddress) {
updateRemoteCluster(clusterAlias, addresses, proxyAddress, ActionListener.wrap((x) -> {}, (x) -> {}));
}

void updateRemoteCluster(
final String clusterAlias,
final List<String> addresses,
final String proxyAddress,
final ActionListener<Void> connectionListener) {
final List<Supplier<DiscoveryNode>> nodes = addresses.stream().<Supplier<DiscoveryNode>>map(address -> () -> {
final TransportAddress transportAddress = new TransportAddress(RemoteClusterAware.parseSeedAddress(address));
final String id = clusterAlias + "#" + transportAddress.toString();
final Version version = Version.CURRENT.minimumCompatibilityVersion();
return new DiscoveryNode(id, transportAddress, version);
}).collect(Collectors.toList());
updateRemoteClusters(Collections.singletonMap(clusterAlias, nodes), connectionListener);
final List<Supplier<DiscoveryNode>> nodes = addresses.stream().<Supplier<DiscoveryNode>>map(address -> () ->
buildSeedNode(clusterAlias, address, Strings.hasLength(proxyAddress))
).collect(Collectors.toList());
updateRemoteClusters(Collections.singletonMap(clusterAlias, new Tuple<>(proxyAddress, nodes)), connectionListener);
}

/**

@@ -338,7 +339,7 @@ public final class RemoteClusterService extends RemoteClusterAware implements Cl
void initializeRemoteClusters() {
final TimeValue timeValue = REMOTE_INITIAL_CONNECTION_TIMEOUT_SETTING.get(settings);
final PlainActionFuture<Void> future = new PlainActionFuture<>();
Map<String, List<Supplier<DiscoveryNode>>> seeds = RemoteClusterAware.buildRemoteClustersSeeds(settings);
Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> seeds = RemoteClusterAware.buildRemoteClustersDynamicConfig(settings);
updateRemoteClusters(seeds, future);
try {
future.get(timeValue.millis(), TimeUnit.MILLISECONDS);
@@ -109,6 +109,7 @@ public abstract class AbstractDisruptionTestCase extends ESIntegTestCase {
protected void beforeIndexDeletion() throws Exception {
if (disableBeforeIndexDeletion == false) {
super.beforeIndexDeletion();
assertSeqNos();
}
}
@@ -28,9 +28,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;

import java.util.stream.Collectors;

import static org.elasticsearch.test.VersionUtils.allVersions;
import static org.elasticsearch.test.VersionUtils.getPreviousVersion;
import static org.elasticsearch.test.VersionUtils.incompatibleFutureVersion;
import static org.elasticsearch.test.VersionUtils.maxCompatibleVersion;

@@ -103,7 +100,7 @@ public class MembershipActionTests extends ESTestCase {
}

if (minNodeVersion.onOrAfter(Version.V_7_0_0_alpha1)) {
Version oldMajor = randomFrom(allVersions().stream().filter(v -> v.major < 6).collect(Collectors.toList()));
Version oldMajor = Version.V_6_4_0.minimumCompatibilityVersion();
expectThrows(IllegalStateException.class, () -> MembershipAction.ensureMajorVersionBarrier(oldMajor, minNodeVersion));
}
@@ -18,6 +18,8 @@
*/
package org.elasticsearch.transport;

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;
import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.Version;

@@ -52,6 +54,7 @@ import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.test.transport.StubbableTransport;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;

@@ -378,15 +381,19 @@ public class RemoteClusterConnectionTests extends ESTestCase {
}
}
}

private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes) throws Exception {
updateSeedNodes(connection, seedNodes, null);
}

private void updateSeedNodes(RemoteClusterConnection connection, List<Supplier<DiscoveryNode>> seedNodes, String proxyAddress)
throws Exception {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> {
exceptionAtomicReference.set(x);
latch.countDown();
});
connection.updateSeedNodes(seedNodes, listener);
connection.updateSeedNodes(proxyAddress, seedNodes, listener);
latch.await();
if (exceptionAtomicReference.get() != null) {
throw exceptionAtomicReference.get();

@@ -517,7 +524,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
exceptionReference.set(x);
listenerCalled.countDown();
});
connection.updateSeedNodes(Arrays.asList(() -> seedNode), listener);
connection.updateSeedNodes(null, Arrays.asList(() -> seedNode), listener);
acceptedLatch.await();
connection.close(); // now close it, this should trigger an interrupt on the socket and we can move on
assertTrue(connection.assertNoRunningConnections());

@@ -787,7 +794,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
throw new AssertionError(x);
}
});
connection.updateSeedNodes(seedNodes, listener);
connection.updateSeedNodes(null, seedNodes, listener);
}
latch.await();
} catch (Exception ex) {

@@ -875,7 +882,7 @@ public class RemoteClusterConnectionTests extends ESTestCase {
}
});
try {
connection.updateSeedNodes(seedNodes, listener);
connection.updateSeedNodes(null, seedNodes, listener);
} catch (Exception e) {
// it's ok if we're shutting down
assertThat(e.getMessage(), containsString("threadcontext is already closed"));

@@ -1384,4 +1391,97 @@ public class RemoteClusterConnectionTests extends ESTestCase {
}
}
}

public void testProxyMode() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService seedTransport = startTransport("node_0", knownNodes, Version.CURRENT);
MockTransportService discoverableTransport = startTransport("node_1", knownNodes, Version.CURRENT)) {
knownNodes.add(seedTransport.getLocalDiscoNode());
knownNodes.add(discoverableTransport.getLocalDiscoNode());
Collections.shuffle(knownNodes, random());
final String proxyAddress = "1.1.1.1:99";
Map<String, DiscoveryNode> nodes = new HashMap<>();
nodes.put("node_0", seedTransport.getLocalDiscoNode());
nodes.put("node_1", discoverableTransport.getLocalDiscoNode());
Transport mockTcpTransport = getProxyTransport(threadPool, Collections.singletonMap(proxyAddress, nodes));
try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, mockTcpTransport, Version.CURRENT,
threadPool, null, Collections.emptySet())) {
service.start();
service.acceptIncomingRequests();
Supplier<DiscoveryNode> seedSupplier = () ->
RemoteClusterAware.buildSeedNode("some-remote-cluster", "node_0:" + randomIntBetween(1, 10000), true);
try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster",
Arrays.asList(seedSupplier), service, service.getConnectionManager(), Integer.MAX_VALUE, n -> true, proxyAddress)) {
updateSeedNodes(connection, Arrays.asList(seedSupplier), proxyAddress);
assertEquals(2, connection.getNumNodesConnected());
assertNotNull(connection.getConnection(discoverableTransport.getLocalDiscoNode()));
assertNotNull(connection.getConnection(seedTransport.getLocalDiscoNode()));
assertEquals(proxyAddress, connection.getConnection(seedTransport.getLocalDiscoNode())
.getNode().getAddress().toString());
assertEquals(proxyAddress, connection.getConnection(discoverableTransport.getLocalDiscoNode())
.getNode().getAddress().toString());
service.getConnectionManager().disconnectFromNode(knownNodes.get(0));
// ensure we reconnect
assertBusy(() -> {
assertEquals(2, connection.getNumNodesConnected());
});
discoverableTransport.close();
seedTransport.close();
}
}
}
}

public static Transport getProxyTransport(ThreadPool threadPool, Map<String, Map<String, DiscoveryNode>> nodeMap) {
if (nodeMap.isEmpty()) {
throw new IllegalArgumentException("nodeMap must be non-empty");
}

StubbableTransport stubbableTransport = new StubbableTransport(MockTransportService.newMockTransport(Settings.EMPTY, Version
.CURRENT, threadPool));
stubbableTransport.setDefaultConnectBehavior((t, node, profile) -> {
Map<String, DiscoveryNode> proxyMapping = nodeMap.get(node.getAddress().toString());
if (proxyMapping == null) {
throw new IllegalStateException("no proxy mapping for node: " + node);
}
DiscoveryNode proxyNode = proxyMapping.get(node.getName());
if (proxyNode == null) {
// this is a seednode - lets pick one randomly
assertEquals("seed node must not have a port in the hostname: " + node.getHostName(),
-1, node.getHostName().lastIndexOf(':'));
assertTrue("missing hostname: " + node, proxyMapping.containsKey(node.getHostName()));
// route by seed hostname
proxyNode = proxyMapping.get(node.getHostName());
}
Transport.Connection connection = t.openConnection(proxyNode, profile);
return new Transport.Connection() {
@Override
public DiscoveryNode getNode() {
return node;
}

@Override
public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options)
throws IOException, TransportException {
connection.sendRequest(requestId, action, request, options);
}

@Override
public void addCloseListener(ActionListener<Void> listener) {
connection.addCloseListener(listener);
}

@Override
public boolean isClosed() {
return connection.isClosed();
}

@Override
public void close() {
connection.close();
}
};
});
return stubbableTransport;
}
}
@@ -26,6 +26,7 @@ import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.admin.cluster.shards.ClusterSearchShardsResponse;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.AbstractScopedSettings;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;

@@ -55,6 +56,7 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Predicate;
import java.util.stream.Collectors;

import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;

@@ -115,25 +117,38 @@ public class RemoteClusterServiceTests extends ESTestCase {
assertEquals("failed to parse port", e.getMessage());
}

public void testBuiltRemoteClustersSeeds() throws Exception {
Map<String, List<Supplier<DiscoveryNode>>> map = RemoteClusterService.buildRemoteClustersSeeds(
Settings.builder().put("search.remote.foo.seeds", "192.168.0.1:8080").put("search.remote.bar.seeds", "[::1]:9090").build());
assertEquals(2, map.size());
public void testBuildRemoteClustersDynamicConfig() throws Exception {
Map<String, Tuple<String, List<Supplier<DiscoveryNode>>>> map = RemoteClusterService.buildRemoteClustersDynamicConfig(
Settings.builder().put("search.remote.foo.seeds", "192.168.0.1:8080")
.put("search.remote.bar.seeds", "[::1]:9090")
.put("search.remote.boom.seeds", "boom-node1.internal:1000")
.put("search.remote.boom.proxy", "foo.bar.com:1234").build());
assertEquals(3, map.size());
assertTrue(map.containsKey("foo"));
assertTrue(map.containsKey("bar"));
assertEquals(1, map.get("foo").size());
assertEquals(1, map.get("bar").size());

DiscoveryNode foo = map.get("foo").get(0).get();
assertTrue(map.containsKey("boom"));
assertEquals(1, map.get("foo").v2().size());
assertEquals(1, map.get("bar").v2().size());
assertEquals(1, map.get("boom").v2().size());

DiscoveryNode foo = map.get("foo").v2().get(0).get();
assertEquals("", map.get("foo").v1());
assertEquals(foo.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("192.168.0.1"), 8080)));
assertEquals(foo.getId(), "foo#192.168.0.1:8080");
assertEquals(foo.getVersion(), Version.CURRENT.minimumCompatibilityVersion());

DiscoveryNode bar = map.get("bar").get(0).get();
DiscoveryNode bar = map.get("bar").v2().get(0).get();
assertEquals(bar.getAddress(), new TransportAddress(new InetSocketAddress(InetAddress.getByName("[::1]"), 9090)));
assertEquals(bar.getId(), "bar#[::1]:9090");
assertEquals("", map.get("bar").v1());
assertEquals(bar.getVersion(), Version.CURRENT.minimumCompatibilityVersion());

DiscoveryNode boom = map.get("boom").v2().get(0).get();
assertEquals(boom.getAddress(), new TransportAddress(TransportAddress.META_ADDRESS, 0));
assertEquals("boom-node1.internal", boom.getHostName());
assertEquals(boom.getId(), "boom#boom-node1.internal:1000");
assertEquals("foo.bar.com:1234", map.get("boom").v1());
assertEquals(boom.getVersion(), Version.CURRENT.minimumCompatibilityVersion());
}

@@ -204,17 +219,17 @@ public class RemoteClusterServiceTests extends ESTestCase {
assertFalse(service.isCrossClusterSearchEnabled());
service.initializeRemoteClusters();
assertFalse(service.isCrossClusterSearchEnabled());
service.updateRemoteCluster("cluster_1", Collections.singletonList(seedNode.getAddress().toString()));
service.updateRemoteCluster("cluster_1", Collections.singletonList(seedNode.getAddress().toString()), null);
assertTrue(service.isCrossClusterSearchEnabled());
assertTrue(service.isRemoteClusterRegistered("cluster_1"));
service.updateRemoteCluster("cluster_2", Collections.singletonList(otherSeedNode.getAddress().toString()));
service.updateRemoteCluster("cluster_2", Collections.singletonList(otherSeedNode.getAddress().toString()), null);
assertTrue(service.isCrossClusterSearchEnabled());
assertTrue(service.isRemoteClusterRegistered("cluster_1"));
assertTrue(service.isRemoteClusterRegistered("cluster_2"));
service.updateRemoteCluster("cluster_2", Collections.emptyList());
service.updateRemoteCluster("cluster_2", Collections.emptyList(), null);
assertFalse(service.isRemoteClusterRegistered("cluster_2"));
IllegalArgumentException iae = expectThrows(IllegalArgumentException.class,
() -> service.updateRemoteCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Collections.emptyList()));
() -> service.updateRemoteCluster(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, Collections.emptyList(), null));
assertEquals("remote clusters must not have the empty string as its key", iae.getMessage());
}
}

@@ -265,14 +280,14 @@ public class RemoteClusterServiceTests extends ESTestCase {
final CountDownLatch firstLatch = new CountDownLatch(1);
service.updateRemoteCluster(
"cluster_1",
Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()),
Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), null,
connectionListener(firstLatch));
firstLatch.await();

final CountDownLatch secondLatch = new CountDownLatch(1);
service.updateRemoteCluster(
"cluster_2",
Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()),
Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), null,
connectionListener(secondLatch));
secondLatch.await();

@@ -330,14 +345,14 @@ public class RemoteClusterServiceTests extends ESTestCase {
final CountDownLatch firstLatch = new CountDownLatch(1);
service.updateRemoteCluster(
"cluster_1",
Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()),
Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), null,
connectionListener(firstLatch));
firstLatch.await();

final CountDownLatch secondLatch = new CountDownLatch(1);
service.updateRemoteCluster(
"cluster_2",
Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()),
Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), null,
connectionListener(secondLatch));
secondLatch.await();

@@ -403,14 +418,14 @@ public class RemoteClusterServiceTests extends ESTestCase {
final CountDownLatch firstLatch = new CountDownLatch(1);
service.updateRemoteCluster(
"cluster_1",
Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()),
Arrays.asList(c1N1Node.getAddress().toString(), c1N2Node.getAddress().toString()), null,
connectionListener(firstLatch));
firstLatch.await();

final CountDownLatch secondLatch = new CountDownLatch(1);
service.updateRemoteCluster(
"cluster_2",
Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()),
Arrays.asList(c2N1Node.getAddress().toString(), c2N2Node.getAddress().toString()), null,
connectionListener(secondLatch));
secondLatch.await();
CountDownLatch latch = new CountDownLatch(1);

@@ -822,4 +837,76 @@ public class RemoteClusterServiceTests extends ESTestCase {
assertTrue(nodePredicate.test(node));
}
}

public void testRemoteClusterWithProxy() throws Exception {
List<DiscoveryNode> knownNodes = new CopyOnWriteArrayList<>();
try (MockTransportService cluster_1_node0 = startTransport("cluster_1_node0", knownNodes, Version.CURRENT);
MockTransportService cluster_1_node_1 = startTransport("cluster_1_node1", knownNodes, Version.CURRENT);
MockTransportService cluster_2_node0 = startTransport("cluster_2_node0", Collections.emptyList(), Version.CURRENT)) {
knownNodes.add(cluster_1_node0.getLocalDiscoNode());
knownNodes.add(cluster_1_node_1.getLocalDiscoNode());
String cluster1Proxy = "1.1.1.1:99";
String cluster2Proxy = "2.2.2.2:99";
Map<String, DiscoveryNode> nodesCluster1 = new HashMap<>();
nodesCluster1.put("cluster_1_node0", cluster_1_node0.getLocalDiscoNode());
nodesCluster1.put("cluster_1_node1", cluster_1_node_1.getLocalDiscoNode());
Map<String, Map<String, DiscoveryNode>> mapping = new HashMap<>();
mapping.put(cluster1Proxy, nodesCluster1);
mapping.put(cluster2Proxy, Collections.singletonMap("cluster_2_node0", cluster_2_node0.getLocalDiscoNode()));

Collections.shuffle(knownNodes, random());
Transport proxyTransport = RemoteClusterConnectionTests.getProxyTransport(threadPool, mapping);
try (MockTransportService transportService = MockTransportService.createNewService(Settings.EMPTY, proxyTransport,
Version.CURRENT, threadPool, null, Collections.emptySet());) {
transportService.start();
transportService.acceptIncomingRequests();
Settings.Builder builder = Settings.builder();
builder.putList("search.remote.cluster_1.seeds", "cluster_1_node0:8080");
builder.put("search.remote.cluster_1.proxy", cluster1Proxy);
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
assertFalse(service.isCrossClusterSearchEnabled());
service.initializeRemoteClusters();
assertTrue(service.isCrossClusterSearchEnabled());
updateRemoteCluster(service, "cluster_1", Collections.singletonList("cluster_1_node1:8081"), cluster1Proxy);
assertTrue(service.isCrossClusterSearchEnabled());
assertTrue(service.isRemoteClusterRegistered("cluster_1"));
assertFalse(service.isRemoteClusterRegistered("cluster_2"));
updateRemoteCluster(service, "cluster_2", Collections.singletonList("cluster_2_node0:9300"), cluster2Proxy);
assertTrue(service.isCrossClusterSearchEnabled());
assertTrue(service.isRemoteClusterRegistered("cluster_1"));
assertTrue(service.isRemoteClusterRegistered("cluster_2"));
List<RemoteConnectionInfo> infos = service.getRemoteConnectionInfos().collect(Collectors.toList());
for (RemoteConnectionInfo info : infos) {
switch (info.clusterAlias) {
case "cluster_1":
assertEquals(2, info.numNodesConnected);
break;
case "cluster_2":
assertEquals(1, info.numNodesConnected);
break;
default:
fail("unknown cluster: " + info.clusterAlias);
}
}
service.updateRemoteCluster("cluster_2", Collections.emptyList(), randomBoolean() ? cluster2Proxy : null);
assertFalse(service.isRemoteClusterRegistered("cluster_2"));
}
}
}
}

private void updateRemoteCluster(RemoteClusterService service, String clusterAlias, List<String> addresses, String proxyAddress)
throws Exception {
CountDownLatch latch = new CountDownLatch(1);
AtomicReference<Exception> exceptionAtomicReference = new AtomicReference<>();
ActionListener<Void> listener = ActionListener.wrap(x -> latch.countDown(), x -> {
exceptionAtomicReference.set(x);
latch.countDown();
});
service.updateRemoteCluster(clusterAlias, addresses, proxyAddress, listener);
latch.await();
if (exceptionAtomicReference.get() != null) {
throw exceptionAtomicReference.get();
}
}
}
@@ -78,7 +78,12 @@ addSubProjects('', new File(rootProject.projectDir, 'plugins'))
addSubProjects('', new File(rootProject.projectDir, 'qa'))
addSubProjects('', new File(rootProject.projectDir, 'x-pack'))

boolean isEclipse = System.getProperty("eclipse.launcher") != null || gradle.startParameter.taskNames.contains('eclipse') || gradle.startParameter.taskNames.contains('cleanEclipse')
List startTasks = gradle.startParameter.taskNames
boolean isEclipse =
System.getProperty("eclipse.launcher") != null || // Detects gradle launched from the Eclipse IDE
System.getProperty("eclipse.application") != null || // Detects gradle launched from the Eclipse compiler server
startTasks.contains("eclipse") || // Detects gradle launched from the command line to do Eclipse stuff
startTasks.contains("cleanEclipse");
if (isEclipse) {
// eclipse cannot handle an intermediate dependency between main and test, so we must create separate projects
// for server-src and server-tests
@@ -753,7 +753,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase

@Override
protected void performOnReplica(ResyncReplicationRequest request, IndexShard replica) throws Exception {
executeResyncOnReplica(replica, request);
executeResyncOnReplica(replica, request, getPrimaryShard().getPendingPrimaryTerm(), getPrimaryShard().getGlobalCheckpoint());
}
}

@@ -766,8 +766,15 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase
return result;
}

private void executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request) throws Exception {
final Translog.Location location = TransportResyncReplicationAction.performOnReplica(request, replica);
private void executeResyncOnReplica(IndexShard replica, ResyncReplicationRequest request,
long operationPrimaryTerm, long globalCheckpointOnPrimary) throws Exception {
final Translog.Location location;
final PlainActionFuture<Releasable> acquirePermitFuture = new PlainActionFuture<>();
replica.acquireReplicaOperationPermit(
operationPrimaryTerm, globalCheckpointOnPrimary, acquirePermitFuture, ThreadPool.Names.SAME, request);
try (Releasable ignored = acquirePermitFuture.actionGet()) {
location = TransportResyncReplicationAction.performOnReplica(request, replica);
}
TransportWriteActionTestHelper.performPostWriteActions(replica, request, location, logger);
}
}
@@ -2353,6 +2353,9 @@ public abstract class ESIntegTestCase extends ESTestCase {
final ObjectLongMap<String> globalCheckpoints = indexShard.getInSyncGlobalCheckpoints();
for (ShardStats shardStats : indexShardStats) {
final SeqNoStats seqNoStats = shardStats.getSeqNoStats();
if (seqNoStats == null) {
continue; // this shard was closed
}
assertThat(shardStats.getShardRouting() + " local checkpoint mismatch",
seqNoStats.getLocalCheckpoint(), equalTo(primarySeqNoStats.getLocalCheckpoint()));
assertThat(shardStats.getShardRouting() + " global checkpoint mismatch",
@@ -95,6 +95,12 @@ public final class MockTransportService extends TransportService {

public static MockTransportService createNewService(Settings settings, Version version, ThreadPool threadPool,
@Nullable ClusterSettings clusterSettings) {
MockTcpTransport mockTcpTransport = newMockTransport(settings, version, threadPool);
return createNewService(settings, mockTcpTransport, version, threadPool, clusterSettings,
Collections.emptySet());
}

public static MockTcpTransport newMockTransport(Settings settings, Version version, ThreadPool threadPool) {
// some tests use MockTransportService to do network based testing. Yet, we run tests in multiple JVMs that means
// concurrent tests could claim port that another JVM just released and if that test tries to simulate a disconnect it might
// be smart enough to re-connect depending on what is tested. To reduce the risk, since this is very hard to debug we use

@@ -102,9 +108,8 @@ public final class MockTransportService extends TransportService {
int basePort = 10300 + (JVM_ORDINAL * 100); // use a non-default port otherwise some cluster in this JVM might reuse a port
settings = Settings.builder().put(TcpTransport.PORT.getKey(), basePort + "-" + (basePort + 100)).put(settings).build();
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(ClusterModule.getNamedWriteables());
final Transport transport = new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE,
return new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE,
new NoneCircuitBreakerService(), namedWriteableRegistry, new NetworkService(Collections.emptyList()), version);
return createNewService(settings, transport, version, threadPool, clusterSettings, Collections.emptySet());
}

public static MockTransportService createNewService(Settings settings, Transport transport, Version version, ThreadPool threadPool,
@ -41,7 +41,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class StubbableTransport implements Transport {
public final class StubbableTransport implements Transport {

private final ConcurrentHashMap<TransportAddress, SendRequestBehavior> sendBehaviors = new ConcurrentHashMap<>();
private final ConcurrentHashMap<TransportAddress, OpenConnectionBehavior> connectBehaviors = new ConcurrentHashMap<>();
@ -60,6 +60,12 @@ public class StubbableTransport implements Transport {
return prior == null;
}

public boolean setDefaultConnectBehavior(OpenConnectionBehavior openConnectionBehavior) {
OpenConnectionBehavior prior = this.defaultConnectBehavior;
this.defaultConnectBehavior = openConnectionBehavior;
return prior == null;
}

boolean addSendBehavior(TransportAddress transportAddress, SendRequestBehavior sendBehavior) {
return sendBehaviors.put(transportAddress, sendBehavior) == null;
}

@ -11,15 +11,26 @@ import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig;
import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
import org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import static java.util.Collections.singletonMap;

/**
* Represents the Rollup capabilities for a specific job on a single rollup index
@ -42,52 +53,7 @@ public class RollupJobCaps implements Writeable, ToXContentObject {
jobID = job.getId();
rollupIndex = job.getRollupIndex();
indexPattern = job.getIndexPattern();
Map<String, Object> dateHistoAggCap = job.getGroupConfig().getDateHistogram().toAggCap();
String dateField = job.getGroupConfig().getDateHistogram().getField();
RollupFieldCaps fieldCaps = fieldCapLookup.get(dateField);
if (fieldCaps == null) {
fieldCaps = new RollupFieldCaps();
}
fieldCaps.addAgg(dateHistoAggCap);
fieldCapLookup.put(dateField, fieldCaps);

if (job.getGroupConfig().getHistogram() != null) {
Map<String, Object> histoAggCap = job.getGroupConfig().getHistogram().toAggCap();
Arrays.stream(job.getGroupConfig().getHistogram().getFields()).forEach(field -> {
RollupFieldCaps caps = fieldCapLookup.get(field);
if (caps == null) {
caps = new RollupFieldCaps();
}
caps.addAgg(histoAggCap);
fieldCapLookup.put(field, caps);
});
}

if (job.getGroupConfig().getTerms() != null) {
Map<String, Object> histoAggCap = job.getGroupConfig().getTerms().toAggCap();
Arrays.stream(job.getGroupConfig().getTerms().getFields()).forEach(field -> {
RollupFieldCaps caps = fieldCapLookup.get(field);
if (caps == null) {
caps = new RollupFieldCaps();
}
caps.addAgg(histoAggCap);
fieldCapLookup.put(field, caps);
});
}

if (job.getMetricsConfig().size() > 0) {
job.getMetricsConfig().forEach(metricConfig -> {
List<Map<String, Object>> metrics = metricConfig.toAggCap();
metrics.forEach(m -> {
RollupFieldCaps caps = fieldCapLookup.get(metricConfig.getField());
if (caps == null) {
caps = new RollupFieldCaps();
}
caps.addAgg(m);
fieldCapLookup.put(metricConfig.getField(), caps);
});
});
}
fieldCapLookup = createRollupFieldCaps(job);
}

public RollupJobCaps(StreamInput in) throws IOException {
@ -149,8 +115,8 @@ public class RollupJobCaps implements Writeable, ToXContentObject {
RollupJobCaps that = (RollupJobCaps) other;

return Objects.equals(this.jobID, that.jobID)
&& Objects.equals(this.rollupIndex, that.rollupIndex)
&& Objects.equals(this.fieldCapLookup, that.fieldCapLookup);
&& Objects.equals(this.rollupIndex, that.rollupIndex)
&& Objects.equals(this.fieldCapLookup, that.fieldCapLookup);
}

@Override
@ -158,6 +124,77 @@ public class RollupJobCaps implements Writeable, ToXContentObject {
return Objects.hash(jobID, rollupIndex, fieldCapLookup);
}

static Map<String, RollupFieldCaps> createRollupFieldCaps(final RollupJobConfig rollupJobConfig) {
final Map<String, RollupFieldCaps> fieldCapLookup = new HashMap<>();

final GroupConfig groupConfig = rollupJobConfig.getGroupConfig();
if (groupConfig != null) {
// Create RollupFieldCaps for the date histogram
final DateHistogramGroupConfig dateHistogram = groupConfig.getDateHistogram();
final Map<String, Object> dateHistogramAggCap = new HashMap<>();
dateHistogramAggCap.put("agg", DateHistogramAggregationBuilder.NAME);
dateHistogramAggCap.put(DateHistogramGroupConfig.INTERVAL, dateHistogram.getInterval().toString());
if (dateHistogram.getDelay() != null) {
dateHistogramAggCap.put(DateHistogramGroupConfig.DELAY, dateHistogram.getDelay().toString());
}
dateHistogramAggCap.put(DateHistogramGroupConfig.TIME_ZONE, dateHistogram.getTimeZone());

final RollupFieldCaps dateHistogramFieldCaps = new RollupFieldCaps();
dateHistogramFieldCaps.addAgg(dateHistogramAggCap);
fieldCapLookup.put(dateHistogram.getField(), dateHistogramFieldCaps);

// Create RollupFieldCaps for the histogram
final HistogramGroupConfig histogram = groupConfig.getHistogram();
if (histogram != null) {
final Map<String, Object> histogramAggCap = new HashMap<>();
histogramAggCap.put("agg", HistogramAggregationBuilder.NAME);
histogramAggCap.put(HistogramGroupConfig.INTERVAL, histogram.getInterval());
for (String field : histogram.getFields()) {
RollupFieldCaps caps = fieldCapLookup.get(field);
if (caps == null) {
caps = new RollupFieldCaps();
}
caps.addAgg(histogramAggCap);
fieldCapLookup.put(field, caps);
}
}

// Create RollupFieldCaps for the term
final TermsGroupConfig terms = groupConfig.getTerms();
if (terms != null) {
final Map<String, Object> termsAggCap = singletonMap("agg", TermsAggregationBuilder.NAME);
for (String field : terms.getFields()) {
RollupFieldCaps caps = fieldCapLookup.get(field);
if (caps == null) {
caps = new RollupFieldCaps();
}
caps.addAgg(termsAggCap);
fieldCapLookup.put(field, caps);
}
}
}

// Create RollupFieldCaps for the metrics
final List<MetricConfig> metricsConfig = rollupJobConfig.getMetricsConfig();
if (metricsConfig.size() > 0) {
metricsConfig.forEach(metricConfig -> {
final List<Map<String, Object>> metrics = metricConfig.getMetrics().stream()
.map(metric -> singletonMap("agg", (Object) metric))
.collect(Collectors.toList());

metrics.forEach(m -> {
RollupFieldCaps caps = fieldCapLookup.get(metricConfig.getField());
if (caps == null) {
caps = new RollupFieldCaps();
}
caps.addAgg(m);
fieldCapLookup.put(metricConfig.getField(), caps);
});
});
}
return Collections.unmodifiableMap(fieldCapLookup);
}

public static class RollupFieldCaps implements Writeable, ToXContentObject {
private List<Map<String, Object>> aggs = new ArrayList<>();

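Aside on the createRollupFieldCaps hunk above: the repeated get, null-check, put sequence is the classic multimap idiom, which Map.computeIfAbsent can express in a single call. The sketch below is illustrative only and is not part of this commit; it uses plain JDK collections in place of RollupFieldCaps, and the field names are made up for the example.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class FieldCapsSketch {
    public static void main(String[] args) {
        // fieldCapLookup maps a field name to the list of aggregation capability maps for that field,
        // mirroring the shape built by createRollupFieldCaps (RollupFieldCaps replaced by a List here).
        Map<String, List<Map<String, Object>>> fieldCapLookup = new HashMap<>();

        Map<String, Object> histogramAggCap = new HashMap<>();
        histogramAggCap.put("agg", "histogram");
        histogramAggCap.put("interval", 60L);

        // Hypothetical field names, for illustration only.
        for (String field : new String[] {"responsetime", "network_bytes_out"}) {
            // computeIfAbsent collapses the get / null-check / put idiom used in the hunk above.
            fieldCapLookup.computeIfAbsent(field, k -> new ArrayList<>()).add(histogramAggCap);
        }

        System.out.println(fieldCapLookup);
    }
}
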
@ -29,7 +29,6 @@ import org.joda.time.DateTimeZone;

import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@ -55,10 +54,10 @@ import static org.elasticsearch.common.xcontent.ObjectParser.ValueType;
public class DateHistogramGroupConfig implements Writeable, ToXContentObject {

static final String NAME = "date_histogram";
private static final String INTERVAL = "interval";
public static final String INTERVAL = "interval";
private static final String FIELD = "field";
public static final String TIME_ZONE = "time_zone";
private static final String DELAY = "delay";
public static final String DELAY = "delay";
private static final String DEFAULT_TIMEZONE = "UTC";
private static final ConstructingObjectParser<DateHistogramGroupConfig, Void> PARSER;
static {
@ -196,21 +195,6 @@ public class DateHistogramGroupConfig implements Writeable, ToXContentObject {
return Collections.singletonList(vsBuilder);
}

/**
* @return A map representing this config object as a RollupCaps aggregation object
*/
public Map<String, Object> toAggCap() {
Map<String, Object> map = new HashMap<>(3);
map.put("agg", DateHistogramAggregationBuilder.NAME);
map.put(INTERVAL, interval.toString());
if (delay != null) {
map.put(DELAY, delay.toString());
}
map.put(TIME_ZONE, timeZone);

return map;
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

@ -24,11 +24,9 @@ import org.elasticsearch.xpack.core.rollup.RollupField;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
@ -48,7 +46,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru
public class HistogramGroupConfig implements Writeable, ToXContentObject {

static final String NAME = "histogram";
private static final String INTERVAL = "interval";
public static final String INTERVAL = "interval";
private static final String FIELDS = "fields";
private static final ConstructingObjectParser<HistogramGroupConfig, Void> PARSER;
static {
@ -106,20 +104,6 @@ public class HistogramGroupConfig implements Writeable, ToXContentObject {
}).collect(Collectors.toList());
}

/**
* @return A map representing this config object as a RollupCaps aggregation object
*/
public Map<String, Object> toAggCap() {
Map<String, Object> map = new HashMap<>(2);
map.put("agg", HistogramAggregationBuilder.NAME);
map.put(INTERVAL, interval);
return map;
}

public Set<String> getAllFields() {
return Arrays.stream(fields).collect(Collectors.toSet());
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

@ -31,7 +31,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.stream.Collectors;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;

@ -152,13 +151,6 @@ public class MetricConfig implements Writeable, ToXContentObject {
return aggs;
}

/**
* @return A map representing this config object as a RollupCaps aggregation object
*/
public List<Map<String, Object>> toAggCap() {
return metrics.stream().map(metric -> Collections.singletonMap("agg", (Object)metric)).collect(Collectors.toList());
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

@ -25,7 +25,6 @@ import org.elasticsearch.xpack.core.rollup.RollupField;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@ -94,15 +93,6 @@ public class TermsGroupConfig implements Writeable, ToXContentObject {
}).collect(Collectors.toList());
}

/**
* @return A map representing this config object as a RollupCaps aggregation object
*/
public Map<String, Object> toAggCap() {
Map<String, Object> map = new HashMap<>(1);
map.put("agg", TermsAggregationBuilder.NAME);
return map;
}

public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCapsResponse,
ActionRequestValidationException validationException) {

@ -4,7 +4,7 @@
"settings" : {
"number_of_shards" : 1,
"number_of_replicas" : 0,
"auto_expand_replicas" : "0-all",
"auto_expand_replicas" : "0-1",
"index.priority": 1000,
"index.format": 6,
"analysis" : {

@ -21,9 +21,12 @@ import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import static org.hamcrest.Matchers.any;
import static org.hamcrest.Matchers.arrayWithSize;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.instanceOf;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@ -31,7 +34,6 @@ import static org.mockito.Mockito.verifyNoMoreInteractions;

public class SchedulerEngineTests extends ESTestCase {

@AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33124")
public void testListenersThrowingExceptionsDoNotCauseOtherListenersToBeSkipped() throws InterruptedException {
final Logger mockLogger = mock(Logger.class);
final SchedulerEngine engine = new SchedulerEngine(Settings.EMPTY, Clock.systemUTC(), mockLogger);
@ -40,6 +42,7 @@ public class SchedulerEngineTests extends ESTestCase {
final int numberOfListeners = randomIntBetween(1, 32);
int numberOfFailingListeners = 0;
final CountDownLatch latch = new CountDownLatch(numberOfListeners);

for (int i = 0; i < numberOfListeners; i++) {
final AtomicBoolean trigger = new AtomicBoolean();
final SchedulerEngine.Listener listener;
@ -55,12 +58,17 @@ public class SchedulerEngineTests extends ESTestCase {
numberOfFailingListeners++;
listener = event -> {
if (trigger.compareAndSet(false, true)) {
latch.countDown();
// we count down the latch after this exception is caught and mock logged in SchedulerEngine#notifyListeners
throw new RuntimeException(getTestName());
} else {
fail("listener invoked twice");
}
};
doAnswer(invocationOnMock -> {
// this happens after the listener has been notified, threw an exception, and then mock logged the exception
latch.countDown();
return null;
}).when(mockLogger).warn(argThat(any(ParameterizedMessage.class)), argThat(any(RuntimeException.class)));
}
listeners.add(Tuple.tuple(listener, trigger));
}
@ -135,7 +143,7 @@ public class SchedulerEngineTests extends ESTestCase {
listenersLatch.await();
assertTrue(listeners.stream().map(Tuple::v2).allMatch(count -> count.get() == numberOfSchedules));
latch.await();
assertFailedListenerLogMessage(mockLogger, numberOfListeners * numberOfSchedules);
assertFailedListenerLogMessage(mockLogger, numberOfSchedules * numberOfListeners);
verifyNoMoreInteractions(mockLogger);
} finally {
engine.stop();

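The next file in the diff, DatafeedJobsRestIT, is switched from the deprecated performRequest(method, endpoint, params, entity) overload to Request objects. A minimal sketch of that pattern follows, using only client calls that appear in the hunks below; the endpoint, JSON body, query parameter, and header value are placeholders, not values from this commit.

import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.io.IOException;

public final class NewStyleRequestSketch {
    // Builds and executes a request the "new style" way: method and endpoint up front,
    // body via setJsonEntity, query parameters via addParameter, headers via RequestOptions.
    static String putExampleDoc(RestClient client) throws IOException {
        Request request = new Request("PUT", "/example-index/doc/1");   // placeholder endpoint
        request.setJsonEntity("{\"field\":\"value\"}");                 // placeholder body
        request.addParameter("refresh", "true");

        RequestOptions.Builder options = request.getOptions().toBuilder();
        options.addHeader("Authorization", "Basic placeholder");        // placeholder credentials
        request.setOptions(options);

        Response response = client.performRequest(request);
        return EntityUtils.toString(response.getEntity());
    }

    private NewStyleRequestSketch() {}
}
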
@ -5,9 +5,9 @@
*/
package org.elasticsearch.xpack.ml.integration;

import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.message.BasicHeader;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
@ -22,10 +22,7 @@ import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;
import org.junit.After;
import org.junit.Before;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
@ -36,6 +33,7 @@ import java.util.stream.Collectors;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;

public class DatafeedJobsRestIT extends ESRestTestCase {

@ -57,26 +55,24 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
}

private void setupDataAccessRole(String index) throws IOException {
String json = "{"
Request request = new Request("PUT", "/_xpack/security/role/test_data_access");
request.setJsonEntity("{"
+ " \"indices\" : ["
+ " { \"names\": [\"" + index + "\"], \"privileges\": [\"read\"] }"
+ " ]"
+ "}";

client().performRequest("put", "_xpack/security/role/test_data_access", Collections.emptyMap(),
new StringEntity(json, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(request);
}

private void setupUser(String user, List<String> roles) throws IOException {
String password = new String(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING.getChars());

String json = "{"
Request request = new Request("PUT", "/_xpack/security/user/" + user);
request.setJsonEntity("{"
+ " \"password\" : \"" + password + "\","
+ " \"roles\" : [ " + roles.stream().map(unquoted -> "\"" + unquoted + "\"").collect(Collectors.joining(", ")) + " ]"
+ "}";

client().performRequest("put", "_xpack/security/user/" + user, Collections.emptyMap(),
new StringEntity(json, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(request);
}

@Before
@ -92,7 +88,10 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
}

private void addAirlineData() throws IOException {
String mappings = "{"
StringBuilder bulk = new StringBuilder();

Request createEmptyAirlineDataRequest = new Request("PUT", "/airline-data-empty");
createEmptyAirlineDataRequest.setJsonEntity("{"
+ " \"mappings\": {"
+ " \"response\": {"
+ " \"properties\": {"
@ -102,12 +101,12 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " }"
+ " }"
+ " }"
+ "}";
client().performRequest("put", "airline-data-empty", Collections.emptyMap(),
new StringEntity(mappings, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(createEmptyAirlineDataRequest);

// Create index with source = enabled, doc_values = enabled, stored = false + multi-field
mappings = "{"
Request createAirlineDataRequest = new Request("PUT", "/airline-data");
createAirlineDataRequest.setJsonEntity("{"
+ " \"mappings\": {"
+ " \"response\": {"
+ " \"properties\": {"
@ -123,18 +122,17 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " }"
+ " }"
+ " }"
+ "}";
client().performRequest("put", "airline-data", Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(createAirlineDataRequest);

client().performRequest("put", "airline-data/response/1", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data/response/2", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}",
ContentType.APPLICATION_JSON));
bulk.append("{\"index\": {\"_index\": \"airline-data\", \"_type\": \"response\", \"_id\": 1}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data\", \"_type\": \"response\", \"_id\": 2}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}\n");

// Create index with source = enabled, doc_values = disabled (except time), stored = false
mappings = "{"
Request createAirlineDataDisabledDocValues = new Request("PUT", "/airline-data-disabled-doc-values");
createAirlineDataDisabledDocValues.setJsonEntity("{"
+ " \"mappings\": {"
+ " \"response\": {"
+ " \"properties\": {"
@ -144,19 +142,17 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " }"
+ " }"
+ " }"
+ "}";
client().performRequest("put", "airline-data-disabled-doc-values", Collections.emptyMap(),
new StringEntity(mappings, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(createAirlineDataDisabledDocValues);

client().performRequest("put", "airline-data-disabled-doc-values/response/1", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-disabled-doc-values/response/2", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}",
ContentType.APPLICATION_JSON));
bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-doc-values\", \"_type\": \"response\", \"_id\": 1}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-doc-values\", \"_type\": \"response\", \"_id\": 2}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}\n");

// Create index with source = disabled, doc_values = enabled (except time), stored = true
mappings = "{"
Request createAirlineDataDisabledSource = new Request("PUT", "/airline-data-disabled-source");
createAirlineDataDisabledSource.setJsonEntity("{"
+ " \"mappings\": {"
+ " \"response\": {"
+ " \"_source\":{\"enabled\":false},"
@ -167,19 +163,16 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " }"
+ " }"
+ " }"
+ "}";
client().performRequest("put", "airline-data-disabled-source", Collections.emptyMap(),
new StringEntity(mappings, ContentType.APPLICATION_JSON));
+ "}");

client().performRequest("put", "airline-data-disabled-source/response/1", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-disabled-source/response/2", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}",
ContentType.APPLICATION_JSON));
bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-source\", \"_type\": \"response\", \"_id\": 1}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":135.22}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-disabled-source\", \"_type\": \"response\", \"_id\": 2}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T01:59:00Z\",\"airline\":\"AAA\",\"responsetime\":541.76}\n");

// Create index with nested documents
mappings = "{"
Request createAirlineDataNested = new Request("PUT", "/nested-data");
createAirlineDataNested.setJsonEntity("{"
+ " \"mappings\": {"
+ " \"response\": {"
+ " \"properties\": {"
@ -187,18 +180,17 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " }"
+ " }"
+ " }"
+ "}";
client().performRequest("put", "nested-data", Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(createAirlineDataNested);

client().performRequest("put", "nested-data/response/1", Collections.emptyMap(),
new StringEntity("{\"time\":\"2016-06-01T00:00:00Z\", \"responsetime\":{\"millis\":135.22}}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "nested-data/response/2", Collections.emptyMap(),
new StringEntity("{\"time\":\"2016-06-01T01:59:00Z\",\"responsetime\":{\"millis\":222.0}}",
ContentType.APPLICATION_JSON));
bulk.append("{\"index\": {\"_index\": \"nested-data\", \"_type\": \"response\", \"_id\": 1}}\n");
bulk.append("{\"time\":\"2016-06-01T00:00:00Z\", \"responsetime\":{\"millis\":135.22}}\n");
bulk.append("{\"index\": {\"_index\": \"nested-data\", \"_type\": \"response\", \"_id\": 2}}\n");
bulk.append("{\"time\":\"2016-06-01T01:59:00Z\",\"responsetime\":{\"millis\":222.0}}\n");

// Create index with multiple docs per time interval for aggregation testing
mappings = "{"
Request createAirlineDataAggs = new Request("PUT", "/airline-data-aggs");
createAirlineDataAggs.setJsonEntity("{"
+ " \"mappings\": {"
+ " \"response\": {"
+ " \"properties\": {"
@ -208,43 +200,33 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " }"
+ " }"
+ " }"
+ "}";
client().performRequest("put", "airline-data-aggs", Collections.emptyMap(),
new StringEntity(mappings, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(createAirlineDataAggs);

client().performRequest("put", "airline-data-aggs/response/1", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":100.0}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-aggs/response/2", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"AAA\",\"responsetime\":200.0}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-aggs/response/3", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"BBB\",\"responsetime\":1000.0}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-aggs/response/4", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"BBB\",\"responsetime\":2000.0}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-aggs/response/5", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"AAA\",\"responsetime\":300.0}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-aggs/response/6", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"AAA\",\"responsetime\":400.0}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-aggs/response/7", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"BBB\",\"responsetime\":3000.0}",
ContentType.APPLICATION_JSON));
client().performRequest("put", "airline-data-aggs/response/8", Collections.emptyMap(),
new StringEntity("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"BBB\",\"responsetime\":4000.0}",
ContentType.APPLICATION_JSON));
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 1}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"AAA\",\"responsetime\":100.0}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 2}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"AAA\",\"responsetime\":200.0}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 3}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T00:00:00Z\",\"airline\":\"BBB\",\"responsetime\":1000.0}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 4}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T00:01:00Z\",\"airline\":\"BBB\",\"responsetime\":2000.0}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 5}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"AAA\",\"responsetime\":300.0}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 6}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"AAA\",\"responsetime\":400.0}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 7}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T01:00:00Z\",\"airline\":\"BBB\",\"responsetime\":3000.0}\n");
bulk.append("{\"index\": {\"_index\": \"airline-data-aggs\", \"_type\": \"response\", \"_id\": 8}}\n");
bulk.append("{\"time stamp\":\"2016-06-01T01:01:00Z\",\"airline\":\"BBB\",\"responsetime\":4000.0}\n");

// Ensure all data is searchable
client().performRequest("post", "_refresh");
bulkIndex(bulk.toString());
}

private void addNetworkData(String index) throws IOException {

// Create index with source = enabled, doc_values = enabled, stored = false + multi-field
String mappings = "{"
Request createIndexRequest = new Request("PUT", index);
createIndexRequest.setJsonEntity("{"
+ " \"mappings\": {"
+ " \"doc\": {"
+ " \"properties\": {"
@ -260,27 +242,25 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
+ " }"
+ " }"
+ " }"
+ "}";
client().performRequest("put", index, Collections.emptyMap(), new StringEntity(mappings, ContentType.APPLICATION_JSON));
+ "}");
client().performRequest(createIndexRequest);

StringBuilder bulk = new StringBuilder();
String docTemplate = "{\"timestamp\":%d,\"host\":\"%s\",\"network_bytes_out\":%d}";
Date date = new Date(1464739200735L);
for (int i=0; i<120; i++) {
for (int i = 0; i < 120; i++) {
long byteCount = randomNonNegativeLong();
String jsonDoc = String.format(Locale.ROOT, docTemplate, date.getTime(), "hostA", byteCount);
client().performRequest("post", index + "/doc", Collections.emptyMap(),
new StringEntity(jsonDoc, ContentType.APPLICATION_JSON));
bulk.append("{\"index\": {\"_index\": \"").append(index).append("\", \"_type\": \"doc\"}}\n");
bulk.append(String.format(Locale.ROOT, docTemplate, date.getTime(), "hostA", byteCount)).append('\n');

byteCount = randomNonNegativeLong();
jsonDoc = String.format(Locale.ROOT, docTemplate, date.getTime(), "hostB", byteCount);
client().performRequest("post", index + "/doc", Collections.emptyMap(),
new StringEntity(jsonDoc, ContentType.APPLICATION_JSON));
bulk.append("{\"index\": {\"_index\": \"").append(index).append("\", \"_type\": \"doc\"}}\n");
bulk.append(String.format(Locale.ROOT, docTemplate, date.getTime(), "hostB", byteCount)).append('\n');

date = new Date(date.getTime() + 10_000);
}

// Ensure all data is searchable
client().performRequest("post", "_refresh");
bulkIndex(bulk.toString());
}

public void testLookbackOnlyWithMixedTypes() throws Exception {
@ -314,11 +294,21 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

public void testLookbackOnlyWithNestedFields() throws Exception {
String jobId = "test-lookback-only-with-nested-fields";
String job = "{\"description\":\"Nested job\", \"analysis_config\" : {\"bucket_span\":\"1h\",\"detectors\" :"
+ "[{\"function\":\"mean\",\"field_name\":\"responsetime.millis\"}]}, \"data_description\" : {\"time_field\":\"time\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"description\": \"Nested job\",\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"1h\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"responsetime.millis\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },"
+ "  \"data_description\": {\"time_field\": \"time\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = jobId + "-datafeed";
new DatafeedBuilder(datafeedId, jobId, "nested-data", "response").build();
@ -326,8 +316,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

startDatafeedAndWaitUntilStopped(datafeedId);
waitUntilJobIsClosed(jobId);
Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
Response jobStatsResponse = client().performRequest(
new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":2"));
assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0"));
@ -340,14 +331,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

public void testInsufficientSearchPrivilegesOnPut() throws Exception {
String jobId = "privs-put-job";
String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"1h\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\","
+ "\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
+ "\"data_description\" : {\"time_field\":\"time stamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"description\": \"Aggs job\",\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"1h\",\n "
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"responsetime\",\n"
+ "        \"by_field_name\":\"airline\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\" : {\"time_field\": \"time stamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
// This should be disallowed, because even though the ml_admin user has permission to
@ -365,14 +365,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

public void testInsufficientSearchPrivilegesOnPreview() throws Exception {
String jobId = "privs-preview-job";
String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"1h\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\","
+ "\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
+ "\"data_description\" : {\"time_field\":\"time stamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"description\": \"Aggs job\",\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"1h\",\n"
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"responsetime\",\n"
+ "        \"by_field_name\": \"airline\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\" : {\"time_field\": \"time stamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
new DatafeedBuilder(datafeedId, jobId, "airline-data-aggs", "response").build();
@ -380,10 +389,11 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
// This should be disallowed, because ml_admin is trying to preview a datafeed created by
// by another user (x_pack_rest_user in this case) that will reveal the content of an index they
// don't have permission to search directly
ResponseException e = expectThrows(ResponseException.class, () ->
client().performRequest("get",
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_preview",
new BasicHeader("Authorization", BASIC_AUTH_VALUE_ML_ADMIN)));
Request getFeed = new Request("GET", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_preview");
RequestOptions.Builder options = getFeed.getOptions().toBuilder();
options.addHeader("Authorization", BASIC_AUTH_VALUE_ML_ADMIN);
getFeed.setOptions(options);
ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(getFeed));

assertThat(e.getMessage(),
containsString("[indices:data/read/field_caps] is unauthorized for user [ml_admin]"));
@ -391,13 +401,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

public void testLookbackOnlyGivenAggregationsWithHistogram() throws Exception {
String jobId = "aggs-histogram-job";
String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"1h\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
+ "\"data_description\" : {\"time_field\":\"time stamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"description\": \"Aggs job\",\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"1h\",\n"
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"responsetime\",\n"
+ "        \"by_field_name\": \"airline\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\": {\"time_field\": \"time stamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
String aggregations = "{\"buckets\":{\"histogram\":{\"field\":\"time stamp\",\"interval\":3600000},"
@ -410,8 +430,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

startDatafeedAndWaitUntilStopped(datafeedId);
waitUntilJobIsClosed(jobId);
Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
Response jobStatsResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":4"));
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":4"));
assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0"));
@ -419,13 +440,23 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

public void testLookbackOnlyGivenAggregationsWithDateHistogram() throws Exception {
String jobId = "aggs-date-histogram-job";
String job = "{\"description\":\"Aggs job\",\"analysis_config\" :{\"bucket_span\":\"3600s\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]},"
+ "\"data_description\" : {\"time_field\":\"time stamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"description\": \"Aggs job\",\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"3600s\",\n"
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"responsetime\",\n"
+ "        \"by_field_name\": \"airline\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\": {\"time_field\": \"time stamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
String aggregations = "{\"time stamp\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"1h\"},"
@ -438,8 +469,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

startDatafeedAndWaitUntilStopped(datafeedId);
waitUntilJobIsClosed(jobId);
Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
Response jobStatsResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":4"));
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":4"));
assertThat(jobStatsResponseAsString, containsString("\"missing_field_count\":0"));
@ -447,13 +479,22 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

public void testLookbackUsingDerivativeAggWithLargerHistogramBucketThanDataRate() throws Exception {
String jobId = "derivative-agg-network-job";
String job = "{\"analysis_config\" :{\"bucket_span\":\"300s\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"bytes-delta\",\"by_field_name\":\"hostname\"}]},"
+ "\"data_description\" : {\"time_field\":\"timestamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"300s\",\n"
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"bytes-delta\",\n"
+ "        \"by_field_name\": \"hostname\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\": {\"time_field\": \"timestamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
String aggregations =
@ -471,8 +512,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

startDatafeedAndWaitUntilStopped(datafeedId);
waitUntilJobIsClosed(jobId);
Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
Response jobStatsResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":40"));
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":40"));
assertThat(jobStatsResponseAsString, containsString("\"out_of_order_timestamp_count\":0"));
@ -483,13 +525,22 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

public void testLookbackUsingDerivativeAggWithSmallerHistogramBucketThanDataRate() throws Exception {
String jobId = "derivative-agg-network-job";
String job = "{\"analysis_config\" :{\"bucket_span\":\"300s\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"bytes-delta\",\"by_field_name\":\"hostname\"}]},"
+ "\"data_description\" : {\"time_field\":\"timestamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"300s\",\n"
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"bytes-delta\",\n"
+ "        \"by_field_name\": \"hostname\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\": {\"time_field\": \"timestamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
String aggregations =
@ -507,21 +558,31 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

startDatafeedAndWaitUntilStopped(datafeedId);
waitUntilJobIsClosed(jobId);
Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
Response jobStatsResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":240"));
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":240"));
}

public void testLookbackWithoutPermissions() throws Exception {
String jobId = "permission-test-network-job";
String job = "{\"analysis_config\" :{\"bucket_span\":\"300s\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"bytes-delta\",\"by_field_name\":\"hostname\"}]},"
+ "\"data_description\" : {\"time_field\":\"timestamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"300s\",\n"
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"bytes-delta\",\n"
+ "        \"by_field_name\": \"hostname\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\": {\"time_field\": \"timestamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
String aggregations =
@ -545,29 +606,39 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

startDatafeedAndWaitUntilStopped(datafeedId, BASIC_AUTH_VALUE_ML_ADMIN_WITH_SOME_DATA_ACCESS);
waitUntilJobIsClosed(jobId);
Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
Response jobStatsResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
// We expect that no data made it through to the job
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":0"));
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":0"));

// There should be a notification saying that there was a problem extracting data
client().performRequest("post", "_refresh");
Response notificationsResponse = client().performRequest("get", AuditorField.NOTIFICATIONS_INDEX + "/_search?q=job_id:" + jobId);
String notificationsResponseAsString = responseEntityToString(notificationsResponse);
client().performRequest(new Request("POST", "/_refresh"));
Response notificationsResponse = client().performRequest(
new Request("GET", AuditorField.NOTIFICATIONS_INDEX + "/_search?q=job_id:" + jobId));
String notificationsResponseAsString = EntityUtils.toString(notificationsResponse.getEntity());
assertThat(notificationsResponseAsString, containsString("\"message\":\"Datafeed is encountering errors extracting data: " +
"action [indices:data/read/search] is unauthorized for user [ml_admin_plus_data]\""));
}

public void testLookbackWithPipelineBucketAgg() throws Exception {
String jobId = "pipeline-bucket-agg-job";
String job = "{\"analysis_config\" :{\"bucket_span\":\"1h\","
+ "\"summary_count_field_name\":\"doc_count\","
+ "\"detectors\":[{\"function\":\"mean\",\"field_name\":\"percentile95_airlines_count\"}]},"
+ "\"data_description\" : {\"time_field\":\"time stamp\"}"
+ "}";
client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId, Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
createJobRequest.setJsonEntity("{\n"
+ "  \"analysis_config\": {\n"
+ "    \"bucket_span\": \"1h\",\n"
+ "    \"summary_count_field_name\": \"doc_count\",\n"
+ "    \"detectors\": [\n"
+ "      {\n"
+ "        \"function\": \"mean\",\n"
+ "        \"field_name\": \"percentile95_airlines_count\"\n"
+ "      }\n"
+ "    ]\n"
+ "  },\n"
+ "  \"data_description\": {\"time_field\": \"time stamp\"}\n"
+ "}");
client().performRequest(createJobRequest);

String datafeedId = "datafeed-" + jobId;
String aggregations = "{\"buckets\":{\"date_histogram\":{\"field\":\"time stamp\",\"interval\":\"15m\"},"
@ -582,8 +653,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {

startDatafeedAndWaitUntilStopped(datafeedId);
waitUntilJobIsClosed(jobId);
Response jobStatsResponse = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
Response jobStatsResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
assertThat(jobStatsResponseAsString, containsString("\"input_field_count\":4"));
assertThat(jobStatsResponseAsString, containsString("\"processed_record_count\":2"));
@ -599,15 +671,15 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").build();
openJob(client(), jobId);

Response response = client().performRequest("post",
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z");
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"started\":true}"));
Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
startRequest.addParameter("start", "2016-06-01T00:00:00Z");
Response response = client().performRequest(startRequest);
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"started\":true}"));
assertBusy(() -> {
try {
Response getJobResponse = client().performRequest("get",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
String responseAsString = responseEntityToString(getJobResponse);
Response getJobResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
String responseAsString = EntityUtils.toString(getJobResponse.getEntity());
assertThat(responseAsString, containsString("\"processed_record_count\":2"));
assertThat(responseAsString, containsString("\"state\":\"opened\""));
} catch (Exception e1) {
@ -619,9 +691,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
// test a model snapshot is present
assertBusy(() -> {
try {
Response getJobResponse = client().performRequest("get",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/model_snapshots");
String responseAsString = responseEntityToString(getJobResponse);
Response getJobResponse = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/model_snapshots"));
String responseAsString = EntityUtils.toString(getJobResponse.getEntity());
assertThat(responseAsString, containsString("\"count\":1"));
} catch (Exception e1) {
throw new RuntimeException(e1);
@ -629,25 +701,25 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
});

ResponseException e = expectThrows(ResponseException.class,
() -> client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
() -> client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId)));
response = e.getResponse();
assertThat(response.getStatusLine().getStatusCode(), equalTo(409));
assertThat(responseEntityToString(response), containsString("Cannot delete job [" + jobId + "] because datafeed [" + datafeedId
+ "] refers to it"));
assertThat(EntityUtils.toString(response.getEntity()),
containsString("Cannot delete job [" + jobId + "] because datafeed [" + datafeedId + "] refers to it"));

response = client().performRequest("post", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stop");
response = client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stop"));
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"stopped\":true}"));
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"stopped\":true}"));
|
||||
|
||||
client().performRequest("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close");
|
||||
client().performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close"));
|
||||
|
||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
|
||||
response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId));
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
|
||||
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"acknowledged\":true}"));
|
||||
|
||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||
response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
|
||||
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"acknowledged\":true}"));
|
||||
}
|
||||
|
||||
public void testForceDeleteWhileDatafeedIsRunning() throws Exception {
|
||||
@ -657,25 +729,26 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||
new DatafeedBuilder(datafeedId, jobId, "airline-data", "response").build();
|
||||
openJob(client(), jobId);
|
||||
|
||||
Response response = client().performRequest("post",
|
||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z");
|
||||
Request startRequest = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
|
||||
startRequest.addParameter("start", "2016-06-01T00:00:00Z");
|
||||
Response response = client().performRequest(startRequest);
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
assertThat(responseEntityToString(response), equalTo("{\"started\":true}"));
|
||||
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"started\":true}"));
|
||||
|
||||
ResponseException e = expectThrows(ResponseException.class,
|
||||
() -> client().performRequest("delete", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId));
|
||||
() -> client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId)));
|
||||
response = e.getResponse();
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(409));
|
||||
assertThat(responseEntityToString(response), containsString("Cannot delete datafeed [" + datafeedId
|
||||
+ "] while its status is started"));
|
||||
assertThat(EntityUtils.toString(response.getEntity()),
|
||||
containsString("Cannot delete datafeed [" + datafeedId + "] while its status is started"));
|
||||
|
||||
response = client().performRequest("delete",
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "?force=true");
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(responseEntityToString(response), equalTo("{\"acknowledged\":true}"));
Request forceDeleteRequest = new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
forceDeleteRequest.addParameter("force", "true");
response = client().performRequest(forceDeleteRequest);
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"acknowledged\":true}"));

expectThrows(ResponseException.class,
() -> client().performRequest("get", "/_xpack/ml/datafeeds/" + datafeedId));
() -> client().performRequest(new Request("GET", "/_xpack/ml/datafeeds/" + datafeedId)));
}
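// Editor's note: illustrative sketch only, not part of the original change. Query strings that the
// old-style calls appended to the endpoint, such as "?force=true" above, become explicit parameters
// on the Request object.
private Response sketchForceDeleteDatafeed(String datafeedId) throws IOException {
    Request request = new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
    request.addParameter("force", "true");   // equivalent of appending "?force=true" to the URL
    return client().performRequest(request);
}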
|
||||
|
||||
private class LookbackOnlyTestHelper {
|
||||
@ -727,9 +800,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||
startDatafeedAndWaitUntilStopped(datafeedId);
|
||||
waitUntilJobIsClosed(jobId);
|
||||
|
||||
Response jobStatsResponse = client().performRequest("get",
|
||||
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
|
||||
String jobStatsResponseAsString = responseEntityToString(jobStatsResponse);
|
||||
Response jobStatsResponse = client().performRequest(new Request("GET",
|
||||
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
||||
String jobStatsResponseAsString = EntityUtils.toString(jobStatsResponse.getEntity());
|
||||
if (shouldSucceedInput) {
|
||||
assertThat(jobStatsResponseAsString, containsString("\"input_record_count\":2"));
|
||||
} else {
|
||||
@ -748,16 +821,20 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||
}
|
||||
|
||||
private void startDatafeedAndWaitUntilStopped(String datafeedId, String authHeader) throws Exception {
|
||||
Response startDatafeedRequest = client().performRequest("post",
|
||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start?start=2016-06-01T00:00:00Z&end=2016-06-02T00:00:00Z",
|
||||
new BasicHeader("Authorization", authHeader));
|
||||
assertThat(startDatafeedRequest.getStatusLine().getStatusCode(), equalTo(200));
|
||||
assertThat(responseEntityToString(startDatafeedRequest), equalTo("{\"started\":true}"));
|
||||
Request request = new Request("POST", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_start");
|
||||
request.addParameter("start", "2016-06-01T00:00:00Z");
|
||||
request.addParameter("end", "2016-06-02T00:00:00Z");
|
||||
RequestOptions.Builder options = request.getOptions().toBuilder();
|
||||
options.addHeader("Authorization", authHeader);
|
||||
request.setOptions(options);
|
||||
Response startDatafeedResponse = client().performRequest(request);
|
||||
assertThat(EntityUtils.toString(startDatafeedResponse.getEntity()), equalTo("{\"started\":true}"));
|
||||
assertBusy(() -> {
|
||||
try {
|
||||
Response datafeedStatsResponse = client().performRequest("get",
|
||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stats");
|
||||
assertThat(responseEntityToString(datafeedStatsResponse), containsString("\"state\":\"stopped\""));
|
||||
Response datafeedStatsResponse = client().performRequest(new Request("GET",
|
||||
MachineLearning.BASE_PATH + "datafeeds/" + datafeedId + "/_stats"));
|
||||
assertThat(EntityUtils.toString(datafeedStatsResponse.getEntity()),
|
||||
containsString("\"state\":\"stopped\""));
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -767,9 +844,9 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||
private void waitUntilJobIsClosed(String jobId) throws Exception {
|
||||
assertBusy(() -> {
|
||||
try {
|
||||
Response jobStatsResponse = client().performRequest("get",
|
||||
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats");
|
||||
assertThat(responseEntityToString(jobStatsResponse), containsString("\"state\":\"closed\""));
|
||||
Response jobStatsResponse = client().performRequest(new Request("GET",
|
||||
MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
||||
assertThat(EntityUtils.toString(jobStatsResponse.getEntity()), containsString("\"state\":\"closed\""));
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException(e);
|
||||
}
|
||||
@ -777,27 +854,30 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
|
||||
}
|
||||
|
||||
private Response createJob(String id, String airlineVariant) throws Exception {
|
||||
String job = "{\n" + " \"description\":\"Analysis of response time by airline\",\n"
|
||||
+ " \"analysis_config\" : {\n" + " \"bucket_span\":\"1h\",\n"
|
||||
+ " \"detectors\" :[\n"
|
||||
+ " {\"function\":\"mean\",\"field_name\":\"responsetime\",\"by_field_name\":\"" + airlineVariant + "\"}]\n"
|
||||
+ " },\n" + " \"data_description\" : {\n"
|
||||
+ " \"format\":\"xcontent\",\n"
|
||||
+ " \"time_field\":\"time stamp\",\n" + " \"time_format\":\"yyyy-MM-dd'T'HH:mm:ssX\"\n" + " }\n"
|
||||
+ "}";
|
||||
return client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + id,
|
||||
Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
|
||||
}
|
||||
|
||||
private static String responseEntityToString(Response response) throws Exception {
|
||||
try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
|
||||
return reader.lines().collect(Collectors.joining("\n"));
|
||||
}
|
||||
Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + id);
|
||||
request.setJsonEntity("{\n"
|
||||
+ " \"description\": \"Analysis of response time by airline\",\n"
|
||||
+ " \"analysis_config\": {\n"
|
||||
+ " \"bucket_span\": \"1h\",\n"
|
||||
+ " \"detectors\" :[\n"
|
||||
+ " {\n"
|
||||
+ " \"function\": \"mean\",\n"
|
||||
+ " \"field_name\": \"responsetime\",\n"
|
||||
+ " \"by_field_name\": \"" + airlineVariant + "\"\n"
|
||||
+ " }\n"
|
||||
+ " ]\n"
|
||||
+ " },\n"
|
||||
+ " \"data_description\": {\n"
|
||||
+ " \"format\": \"xcontent\",\n"
|
||||
+ " \"time_field\": \"time stamp\",\n"
|
||||
+ " \"time_format\": \"yyyy-MM-dd'T'HH:mm:ssX\"\n"
|
||||
+ " }\n"
|
||||
+ "}");
|
||||
return client().performRequest(request);
|
||||
}
|
||||
|
||||
public static void openJob(RestClient client, String jobId) throws IOException {
|
||||
Response response = client.performRequest("post", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open");
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
client.performRequest(new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_open"));
|
||||
}
|
||||
|
||||
@After
@@ -850,17 +930,28 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
}

Response build() throws IOException {
String datafeedConfig = "{"
Request request = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId);
request.setJsonEntity("{"
+ "\"job_id\": \"" + jobId + "\",\"indexes\":[\"" + index + "\"],\"types\":[\"" + type + "\"]"
+ (source ? ",\"_source\":true" : "")
+ (scriptedFields == null ? "" : ",\"script_fields\":" + scriptedFields)
+ (aggregations == null ? "" : ",\"aggs\":" + aggregations)
+ (chunkingTimespan == null ? "" :
",\"chunking_config\":{\"mode\":\"MANUAL\",\"time_span\":\"" + chunkingTimespan + "\"}")
+ "}";
return client().performRequest("put", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId, Collections.emptyMap(),
new StringEntity(datafeedConfig, ContentType.APPLICATION_JSON),
new BasicHeader("Authorization", authHeader));
+ "}");
RequestOptions.Builder options = request.getOptions().toBuilder();
options.addHeader("Authorization", authHeader);
request.setOptions(options);
return client().performRequest(request);
}
}
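// Editor's note: illustrative sketch only, not part of the original change. Per-request headers such
// as Authorization are no longer passed as BasicHeader varargs; they are attached through
// RequestOptions on the Request itself, as DatafeedBuilder.build() above now does.
private static void sketchAttachAuthHeader(Request request, String authHeader) {
    RequestOptions.Builder options = request.getOptions().toBuilder();
    options.addHeader("Authorization", authHeader);   // e.g. a basicAuthHeaderValue(...) token
    request.setOptions(options);
}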

private void bulkIndex(String bulk) throws IOException {
Request bulkRequest = new Request("POST", "/_bulk");
bulkRequest.setJsonEntity(bulk);
bulkRequest.addParameter("refresh", "true");
bulkRequest.addParameter("pretty", null);
String bulkResponse = EntityUtils.toString(client().performRequest(bulkRequest).getEntity());
assertThat(bulkResponse, not(containsString("\"errors\": false")));
}
}

@@ -5,8 +5,7 @@
*/
package org.elasticsearch.xpack.ml.integration;

import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
@@ -23,15 +22,10 @@ import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFiel
import org.elasticsearch.xpack.test.rest.XPackRestTestHelper;
import org.junit.After;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Locale;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.containsString;
@@ -55,15 +49,13 @@ public class MlJobIT extends ESRestTestCase {
|
||||
|
||||
public void testPutJob_GivenFarequoteConfig() throws Exception {
|
||||
Response response = createFarequoteJob("given-farequote-config-job");
|
||||
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
String responseAsString = responseEntityToString(response);
|
||||
String responseAsString = EntityUtils.toString(response.getEntity());
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"given-farequote-config-job\""));
|
||||
}
|
||||
|
||||
public void testGetJob_GivenNoSuchJob() throws Exception {
|
||||
ResponseException e = expectThrows(ResponseException.class,
|
||||
() -> client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/non-existing-job/_stats"));
|
||||
ResponseException e = expectThrows(ResponseException.class, () ->
|
||||
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/non-existing-job/_stats")));
|
||||
|
||||
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(404));
|
||||
assertThat(e.getMessage(), containsString("No known job with id 'non-existing-job'"));
|
||||
@ -72,11 +64,9 @@ public class MlJobIT extends ESRestTestCase {
|
||||
public void testGetJob_GivenJobExists() throws Exception {
|
||||
createFarequoteJob("get-job_given-job-exists-job");
|
||||
|
||||
Response response = client().performRequest("get",
|
||||
MachineLearning.BASE_PATH + "anomaly_detectors/get-job_given-job-exists-job/_stats");
|
||||
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
String responseAsString = responseEntityToString(response);
|
||||
Response response = client().performRequest(new Request("GET",
|
||||
MachineLearning.BASE_PATH + "anomaly_detectors/get-job_given-job-exists-job/_stats"));
|
||||
String responseAsString = EntityUtils.toString(response.getEntity());
|
||||
assertThat(responseAsString, containsString("\"count\":1"));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"get-job_given-job-exists-job\""));
|
||||
}
|
||||
@ -86,20 +76,16 @@ public class MlJobIT extends ESRestTestCase {
|
||||
createFarequoteJob(jobId);
|
||||
|
||||
// Explicit _all
|
||||
Response response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/_all");
|
||||
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
String responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString("\"count\":1"));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"" + jobId + "\""));
|
||||
String explictAll = EntityUtils.toString(
|
||||
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/_all")).getEntity());
|
||||
assertThat(explictAll, containsString("\"count\":1"));
|
||||
assertThat(explictAll, containsString("\"job_id\":\"" + jobId + "\""));
|
||||
|
||||
// Implicit _all
|
||||
response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors");
|
||||
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString("\"count\":1"));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"" + jobId + "\""));
|
||||
String implicitAll = EntityUtils.toString(
|
||||
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors")).getEntity());
|
||||
assertThat(implicitAll, containsString("\"count\":1"));
|
||||
assertThat(implicitAll, containsString("\"job_id\":\"" + jobId + "\""));
|
||||
}
|
||||
|
||||
public void testGetJobs_GivenMultipleJobs() throws Exception {
|
||||
@ -108,36 +94,37 @@ public class MlJobIT extends ESRestTestCase {
|
||||
createFarequoteJob("given-multiple-jobs-job-3");
|
||||
|
||||
// Explicit _all
|
||||
Response response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/_all");
|
||||
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
String responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString("\"count\":3"));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
||||
String explicitAll = EntityUtils.toString(
|
||||
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/_all")).getEntity());
|
||||
assertThat(explicitAll, containsString("\"count\":3"));
|
||||
assertThat(explicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
||||
assertThat(explicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
||||
assertThat(explicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
||||
|
||||
// Implicit _all
|
||||
response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors");
|
||||
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString("\"count\":3"));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
||||
assertThat(responseAsString, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
||||
String implicitAll = EntityUtils.toString(
|
||||
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors")).getEntity());
|
||||
assertThat(implicitAll, containsString("\"count\":3"));
|
||||
assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-1\""));
|
||||
assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-2\""));
|
||||
assertThat(implicitAll, containsString("\"job_id\":\"given-multiple-jobs-job-3\""));
|
||||
}
|
||||
|
||||
private Response createFarequoteJob(String jobId) throws IOException {
|
||||
String job = "{\n" + " \"description\":\"Analysis of response time by airline\",\n"
|
||||
+ " \"analysis_config\" : {\n" + " \"bucket_span\": \"3600s\",\n"
|
||||
Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||
request.setJsonEntity(
|
||||
"{\n"
|
||||
+ " \"description\":\"Analysis of response time by airline\",\n"
|
||||
+ " \"analysis_config\" : {\n"
|
||||
+ " \"bucket_span\": \"3600s\",\n"
|
||||
+ " \"detectors\" :[{\"function\":\"metric\",\"field_name\":\"responsetime\",\"by_field_name\":\"airline\"}]\n"
|
||||
+ " },\n" + " \"data_description\" : {\n" + " \"field_delimiter\":\",\",\n" + " " +
|
||||
"\"time_field\":\"time\",\n"
|
||||
+ " \"time_format\":\"yyyy-MM-dd HH:mm:ssX\"\n" + " }\n" + "}";
|
||||
|
||||
return client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
|
||||
Collections.emptyMap(), new StringEntity(job, ContentType.APPLICATION_JSON));
|
||||
+ " },\n" + " \"data_description\" : {\n"
|
||||
+ " \"field_delimiter\":\",\",\n"
|
||||
+ " \"time_field\":\"time\",\n"
|
||||
+ " \"time_format\":\"yyyy-MM-dd HH:mm:ssX\"\n"
|
||||
+ " }\n"
|
||||
+ "}");
|
||||
return client().performRequest(request);
|
||||
}
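// Editor's note: illustrative sketch only, not part of the original change. A caller of the helper
// above reads the response body with EntityUtils, mirroring testPutJob_GivenFarequoteConfig; the
// job id used here is an assumption.
public void sketchCreateFarequoteJobUsage() throws Exception {
    Response response = createFarequoteJob("sketch-farequote-job");
    assertThat(EntityUtils.toString(response.getEntity()), containsString("\"job_id\":\"sketch-farequote-job\""));
}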
|
||||
|
||||
public void testCantCreateJobWithSameID() throws Exception {
|
||||
@ -148,18 +135,14 @@ public class MlJobIT extends ESRestTestCase {
|
||||
" \"data_description\": {},\n" +
|
||||
" \"results_index_name\" : \"%s\"}";
|
||||
|
||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, "index-1");
|
||||
|
||||
String jobId = "cant-create-job-with-same-id-job";
|
||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId ,
|
||||
Collections.emptyMap(),
|
||||
new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
Request createJob1 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||
createJob1.setJsonEntity(String.format(Locale.ROOT, jobTemplate, "index-1"));
|
||||
client().performRequest(createJob1);
|
||||
|
||||
final String jobConfig2 = String.format(Locale.ROOT, jobTemplate, "index-2");
|
||||
ResponseException e = expectThrows(ResponseException.class,
|
||||
() ->client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId,
|
||||
Collections.emptyMap(), new StringEntity(jobConfig2, ContentType.APPLICATION_JSON)));
|
||||
Request createJob2 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||
createJob2.setJsonEntity(String.format(Locale.ROOT, jobTemplate, "index-2"));
|
||||
ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(createJob2));
|
||||
|
||||
assertThat(e.getResponse().getStatusLine().getStatusCode(), equalTo(400));
|
||||
assertThat(e.getMessage(), containsString("The job cannot be created with the Id '" + jobId + "'. The Id is already used."));
|
||||
@ -175,94 +158,78 @@ public class MlJobIT extends ESRestTestCase {
|
||||
|
||||
String jobId1 = "create-jobs-with-index-name-option-job-1";
|
||||
String indexName = "non-default-index";
|
||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, indexName);
|
||||
|
||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
Request createJob1 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||
createJob1.setJsonEntity(String.format(Locale.ROOT, jobTemplate, indexName));
|
||||
client().performRequest(createJob1);
|
||||
|
||||
String jobId2 = "create-jobs-with-index-name-option-job-2";
|
||||
response = client().performRequest("put", MachineLearning.BASE_PATH
|
||||
+ "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
Request createJob2 = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||
createJob2.setEntity(createJob1.getEntity());
|
||||
client().performRequest(createJob2);
|
||||
|
||||
// With security enabled GET _aliases throws an index_not_found_exception
|
||||
// if no aliases have been created. In multi-node tests the alias may not
|
||||
// appear immediately so wait here.
|
||||
assertBusy(() -> {
|
||||
try {
|
||||
Response aliasesResponse = client().performRequest("get", "_aliases");
|
||||
assertEquals(200, aliasesResponse.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(aliasesResponse);
|
||||
assertThat(responseAsString,
|
||||
String aliasesResponse = EntityUtils.toString(client().performRequest(new Request("GET", "/_aliases")).getEntity());
|
||||
assertThat(aliasesResponse,
|
||||
containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{"));
|
||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)
|
||||
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)
|
||||
+ "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}"));
|
||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}"));
|
||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)
|
||||
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}"));
|
||||
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)
|
||||
+ "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}"));
|
||||
assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}"));
|
||||
assertThat(aliasesResponse, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}"));
|
||||
} catch (ResponseException e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
});
|
||||
|
||||
Response indicesResponse = client().performRequest("get", "_cat/indices");
|
||||
assertEquals(200, indicesResponse.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(indicesResponse);
|
||||
String responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||
assertThat(responseAsString,
|
||||
containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));
|
||||
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1))));
|
||||
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))));
|
||||
|
||||
String bucketResult = String.format(Locale.ROOT,
|
||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
||||
jobId1, "1234", 1);
|
||||
String id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1234", 300);
|
||||
response = client().performRequest("put", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id,
|
||||
Collections.emptyMap(), new StringEntity(bucketResult, ContentType.APPLICATION_JSON));
|
||||
assertEquals(201, response.getStatusLine().getStatusCode());
|
||||
|
||||
bucketResult = String.format(Locale.ROOT,
|
||||
Request createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id);
|
||||
createResultRequest.setJsonEntity(String.format(Locale.ROOT,
|
||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
||||
jobId1, "1236", 1);
|
||||
jobId1, "1234", 1));
|
||||
client().performRequest(createResultRequest);
|
||||
|
||||
id = String.format(Locale.ROOT, "%s_bucket_%s_%s", jobId1, "1236", 300);
|
||||
response = client().performRequest("put", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id,
|
||||
Collections.emptyMap(), new StringEntity(bucketResult, ContentType.APPLICATION_JSON));
|
||||
assertEquals(201, response.getStatusLine().getStatusCode());
|
||||
createResultRequest = new Request("PUT", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/doc/" + id);
|
||||
createResultRequest.setJsonEntity(String.format(Locale.ROOT,
|
||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"result_type\":\"bucket\", \"bucket_span\": \"%s\"}",
|
||||
jobId1, "1236", 1));
|
||||
client().performRequest(createResultRequest);
|
||||
|
||||
client().performRequest("post", "_refresh");
|
||||
client().performRequest(new Request("POST", "/_refresh"));
|
||||
|
||||
response = client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1 + "/results/buckets");
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
responseAsString = responseEntityToString(response);
|
||||
responseAsString = EntityUtils.toString(client().performRequest(
|
||||
new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1 + "/results/buckets")).getEntity());
|
||||
assertThat(responseAsString, containsString("\"count\":2"));
|
||||
|
||||
response = client().performRequest("get", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/_search");
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
responseAsString = responseEntityToString(response);
|
||||
responseAsString = EntityUtils.toString(client().performRequest(
|
||||
new Request("GET", AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + "/_search")).getEntity());
|
||||
assertThat(responseAsString, containsString("\"total\":2"));
|
||||
|
||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1));
|
||||
|
||||
// check that indices still exist, but are empty and aliases are gone
|
||||
response = client().performRequest("get", "_aliases");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
responseAsString = responseEntityToString(response);
|
||||
responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_aliases")).getEntity());
|
||||
assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1))));
|
||||
assertThat(responseAsString, containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2))); //job2 still exists
|
||||
|
||||
response = client().performRequest("get", "_cat/indices");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
responseAsString = responseEntityToString(response);
|
||||
responseAsString = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||
assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName));
|
||||
|
||||
client().performRequest("post", "_refresh");
client().performRequest(new Request("POST", "/_refresh"));

response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
responseAsString = EntityUtils.toString(client().performRequest(
new Request("GET", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName + "/_count")).getEntity());
assertThat(responseAsString, containsString("\"count\":0"));
}
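// Editor's note: illustrative sketch only, not part of the original change. When two requests need
// the same JSON body, the migrated tests reuse the entity already built by setJsonEntity(...) rather
// than re-encoding the string, as createJob2.setEntity(createJob1.getEntity()) does above; the job
// id below is an assumption.
private Response sketchCloneCreateRequest(Request existingCreateRequest) throws IOException {
    Request copy = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/sketch-cloned-job");
    copy.setEntity(existingCreateRequest.getEntity());   // shares the HttpEntity produced by setJsonEntity(...)
    return client().performRequest(copy);
}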
|
||||
|
||||
@ -278,32 +245,27 @@ public class MlJobIT extends ESRestTestCase {
|
||||
String byFieldName1 = "responsetime";
|
||||
String jobId2 = "create-job-in-shared-index-updates-mapping-job-2";
|
||||
String byFieldName2 = "cpu-usage";
|
||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName1);
|
||||
|
||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
Request createJob1Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||
createJob1Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName1));
|
||||
client().performRequest(createJob1Request);
|
||||
|
||||
// Check the index mapping contains the first by_field_name
|
||||
response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX
|
||||
+ AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping?pretty");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(byFieldName1));
|
||||
assertThat(responseAsString, not(containsString(byFieldName2)));
|
||||
Request getResultsMappingRequest = new Request("GET",
|
||||
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping");
|
||||
getResultsMappingRequest.addParameter("pretty", null);
|
||||
String resultsMappingAfterJob1 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||
assertThat(resultsMappingAfterJob1, containsString(byFieldName1));
|
||||
assertThat(resultsMappingAfterJob1, not(containsString(byFieldName2)));
|
||||
|
||||
jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName2);
|
||||
response = client().performRequest("put", MachineLearning.BASE_PATH
|
||||
+ "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
Request createJob2Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||
createJob2Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName2));
|
||||
client().performRequest(createJob2Request);
|
||||
|
||||
// Check the index mapping now contains both fields
|
||||
response = client().performRequest("get", AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX
|
||||
+ AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT + "/_mapping?pretty");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(byFieldName1));
|
||||
assertThat(responseAsString, containsString(byFieldName2));
|
||||
String resultsMappingAfterJob2 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||
assertThat(resultsMappingAfterJob2, containsString(byFieldName1));
|
||||
assertThat(resultsMappingAfterJob2, containsString(byFieldName2));
|
||||
}
|
||||
|
||||
public void testCreateJobInCustomSharedIndexUpdatesMapping() throws Exception {
|
||||
@ -318,32 +280,27 @@ public class MlJobIT extends ESRestTestCase {
|
||||
String byFieldName1 = "responsetime";
|
||||
String jobId2 = "create-job-in-custom-shared-index-updates-mapping-job-2";
|
||||
String byFieldName2 = "cpu-usage";
|
||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName1);
|
||||
|
||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
Request createJob1Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||
createJob1Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName1));
|
||||
client().performRequest(createJob1Request);
|
||||
|
||||
// Check the index mapping contains the first by_field_name
|
||||
response = client().performRequest("get",
|
||||
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(byFieldName1));
|
||||
assertThat(responseAsString, not(containsString(byFieldName2)));
|
||||
Request getResultsMappingRequest = new Request("GET",
|
||||
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index/_mapping");
|
||||
getResultsMappingRequest.addParameter("pretty", null);
|
||||
String resultsMappingAfterJob1 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||
assertThat(resultsMappingAfterJob1, containsString(byFieldName1));
|
||||
assertThat(resultsMappingAfterJob1, not(containsString(byFieldName2)));
|
||||
|
||||
jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName2);
|
||||
response = client().performRequest("put", MachineLearning.BASE_PATH
|
||||
+ "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
Request createJob2Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||
createJob2Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName2));
|
||||
client().performRequest(createJob2Request);
|
||||
|
||||
// Check the index mapping now contains both fields
|
||||
response = client().performRequest("get",
|
||||
AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-shared-index" + "/_mapping?pretty");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(byFieldName1));
|
||||
assertThat(responseAsString, containsString(byFieldName2));
|
||||
String resultsMappingAfterJob2 = EntityUtils.toString(client().performRequest(getResultsMappingRequest).getEntity());
|
||||
assertThat(resultsMappingAfterJob2, containsString(byFieldName1));
|
||||
assertThat(resultsMappingAfterJob2, containsString(byFieldName2));
|
||||
}
|
||||
|
||||
public void testCreateJob_WithClashingFieldMappingsFails() throws Exception {
|
||||
@ -366,17 +323,14 @@ public class MlJobIT extends ESRestTestCase {
|
||||
byFieldName1 = "response.time";
|
||||
byFieldName2 = "response";
|
||||
}
|
||||
String jobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName1);
|
||||
|
||||
Response response = client().performRequest("put", MachineLearning.BASE_PATH
|
||||
+ "anomaly_detectors/" + jobId1, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON));
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
|
||||
final String failingJobConfig = String.format(Locale.ROOT, jobTemplate, byFieldName2);
|
||||
ResponseException e = expectThrows(ResponseException.class,
|
||||
() -> client().performRequest("put", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2,
|
||||
Collections.emptyMap(), new StringEntity(failingJobConfig, ContentType.APPLICATION_JSON)));
|
||||
Request createJob1Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId1);
|
||||
createJob1Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName1));
|
||||
client().performRequest(createJob1Request);
|
||||
|
||||
Request createJob2Request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId2);
|
||||
createJob2Request.setJsonEntity(String.format(Locale.ROOT, jobTemplate, byFieldName2));
|
||||
ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(createJob2Request));
|
||||
assertThat(e.getMessage(),
|
||||
containsString("This job would cause a mapping clash with existing field [response] - " +
|
||||
"avoid the clash by assigning a dedicated results index"));
|
||||
@ -387,35 +341,27 @@ public class MlJobIT extends ESRestTestCase {
|
||||
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
||||
createFarequoteJob(jobId);
|
||||
|
||||
Response response = client().performRequest("get", "_cat/indices");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(indexName));
|
||||
String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||
assertThat(indicesBeforeDelete, containsString(indexName));
|
||||
|
||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||
|
||||
// check that the index still exists (it's shared by default)
|
||||
response = client().performRequest("get", "_cat/indices");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(indexName));
|
||||
String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||
assertThat(indicesAfterDelete, containsString(indexName));
|
||||
|
||||
assertBusy(() -> {
|
||||
try {
|
||||
Response r = client().performRequest("get", indexName + "/_count");
|
||||
assertEquals(200, r.getStatusLine().getStatusCode());
|
||||
String responseString = responseEntityToString(r);
|
||||
assertThat(responseString, containsString("\"count\":0"));
|
||||
String count = EntityUtils.toString(client().performRequest(new Request("GET", indexName + "/_count")).getEntity());
|
||||
assertThat(count, containsString("\"count\":0"));
|
||||
} catch (Exception e) {
|
||||
fail(e.getMessage());
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
// check that the job itself is gone
|
||||
expectThrows(ResponseException.class, () ->
|
||||
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
||||
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")));
|
||||
}
|
||||
|
||||
public void testDeleteJobAfterMissingIndex() throws Exception {
|
||||
@ -424,28 +370,22 @@ public class MlJobIT extends ESRestTestCase {
|
||||
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
||||
createFarequoteJob(jobId);
|
||||
|
||||
Response response = client().performRequest("get", "_cat/indices");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(indexName));
|
||||
String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||
assertThat(indicesBeforeDelete, containsString(indexName));
|
||||
|
||||
// Manually delete the index so that we can test that deletion proceeds
|
||||
// normally anyway
|
||||
response = client().performRequest("delete", indexName);
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
client().performRequest(new Request("DELETE", indexName));
|
||||
|
||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||
|
||||
// check index was deleted
|
||||
response = client().performRequest("get", "_cat/indices");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, not(containsString(aliasName)));
|
||||
assertThat(responseAsString, not(containsString(indexName)));
|
||||
String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||
assertThat(indicesAfterDelete, not(containsString(aliasName)));
|
||||
assertThat(indicesAfterDelete, not(containsString(indexName)));
|
||||
|
||||
expectThrows(ResponseException.class, () ->
|
||||
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
|
||||
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")));
|
||||
}
|
||||
|
||||
public void testDeleteJobAfterMissingAliases() throws Exception {
|
||||
@ -460,11 +400,9 @@ public class MlJobIT extends ESRestTestCase {
|
||||
// appear immediately so wait here.
|
||||
assertBusy(() -> {
|
||||
try {
|
||||
Response aliasesResponse = client().performRequest(new Request("get", "_cat/aliases"));
|
||||
assertEquals(200, aliasesResponse.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(aliasesResponse);
|
||||
assertThat(responseAsString, containsString(readAliasName));
|
||||
assertThat(responseAsString, containsString(writeAliasName));
|
||||
String aliases = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/aliases")).getEntity());
|
||||
assertThat(aliases, containsString(readAliasName));
|
||||
assertThat(aliases, containsString(writeAliasName));
|
||||
} catch (ResponseException e) {
|
||||
throw new AssertionError(e);
|
||||
}
|
||||
@ -472,17 +410,14 @@ public class MlJobIT extends ESRestTestCase {
|
||||
|
||||
// Manually delete the aliases so that we can test that deletion proceeds
|
||||
// normally anyway
|
||||
Response response = client().performRequest("delete", indexName + "/_alias/" + readAliasName);
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
response = client().performRequest("delete", indexName + "/_alias/" + writeAliasName);
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
client().performRequest(new Request("DELETE", indexName + "/_alias/" + readAliasName));
|
||||
client().performRequest(new Request("DELETE", indexName + "/_alias/" + writeAliasName));
|
||||
|
||||
// check aliases were deleted
|
||||
expectThrows(ResponseException.class, () -> client().performRequest("get", indexName + "/_alias/" + readAliasName));
|
||||
expectThrows(ResponseException.class, () -> client().performRequest("get", indexName + "/_alias/" + writeAliasName));
|
||||
expectThrows(ResponseException.class, () -> client().performRequest(new Request("GET", indexName + "/_alias/" + readAliasName)));
|
||||
expectThrows(ResponseException.class, () -> client().performRequest(new Request("GET", indexName + "/_alias/" + writeAliasName)));
|
||||
|
||||
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
|
||||
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
|
||||
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));
|
||||
}
|
||||
|
||||
public void testMultiIndexDelete() throws Exception {
|
||||
@ -490,86 +425,63 @@ public class MlJobIT extends ESRestTestCase {
|
||||
String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT;
|
||||
createFarequoteJob(jobId);
|
||||
|
||||
Response response = client().performRequest("put", indexName + "-001");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
client().performRequest(new Request("PUT", indexName + "-001"));
|
||||
client().performRequest(new Request("PUT", indexName + "-002"));
|
||||
|
||||
response = client().performRequest("put", indexName + "-002");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
|
||||
response = client().performRequest("get", "_cat/indices");
|
||||
assertEquals(200, response.getStatusLine().getStatusCode());
|
||||
String responseAsString = responseEntityToString(response);
|
||||
assertThat(responseAsString, containsString(indexName));
|
||||
assertThat(responseAsString, containsString(indexName + "-001"));
|
||||
assertThat(responseAsString, containsString(indexName + "-002"));
|
||||
String indicesBeforeDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
|
||||
assertThat(indicesBeforeDelete, containsString(indexName));
|
||||
assertThat(indicesBeforeDelete, containsString(indexName + "-001"));
|
||||
assertThat(indicesBeforeDelete, containsString(indexName + "-002"));
|
||||
|
||||
// Add some documents to each index to make sure the DBQ clears them out
|
||||
String recordResult =
|
||||
String.format(Locale.ROOT,
|
||||
Request createDoc0 = new Request("PUT", indexName + "/doc/" + 123);
|
||||
createDoc0.setJsonEntity(String.format(Locale.ROOT,
|
||||
"{\"job_id\":\"%s\", \"timestamp\": \"%s\", \"bucket_span\":%d, \"result_type\":\"record\"}",
|
||||
jobId, 123, 1);
|
||||
client().performRequest("put", indexName + "/doc/" + 123,
|
||||
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
|
||||
client().performRequest("put", indexName + "-001/doc/" + 123,
|
||||
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
|
||||
client().performRequest("put", indexName + "-002/doc/" + 123,
|
||||
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
|
||||
jobId, 123, 1));
|
||||
client().performRequest(createDoc0);
|
||||
Request createDoc1 = new Request("PUT", indexName + "-001/doc/" + 123);
|
||||
createDoc1.setEntity(createDoc0.getEntity());
|
||||
client().performRequest(createDoc1);
|
||||
Request createDoc2 = new Request("PUT", indexName + "-002/doc/" + 123);
|
||||
createDoc2.setEntity(createDoc0.getEntity());
|
||||
client().performRequest(createDoc2);
|
||||
|
||||
// Also index a few through the alias for the first job
client().performRequest("put", indexName + "/doc/" + 456,
Collections.singletonMap("refresh", "true"), new StringEntity(recordResult, ContentType.APPLICATION_JSON));
Request createDoc3 = new Request("PUT", indexName + "/doc/" + 456);
createDoc3.setEntity(createDoc0.getEntity());
client().performRequest(createDoc3);

client().performRequest("post", "_refresh");
client().performRequest(new Request("POST", "/_refresh"));

// check for the documents
response = client().performRequest("get", indexName+ "/_count");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":2"));

response = client().performRequest("get", indexName + "-001/_count");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":1"));

response = client().performRequest("get", indexName + "-002/_count");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":1"));
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "/_count")).getEntity()),
containsString("\"count\":2"));
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-001/_count")).getEntity()),
containsString("\"count\":1"));
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-002/_count")).getEntity()),
containsString("\"count\":1"));

// Delete
response = client().performRequest("delete", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));

client().performRequest("post", "_refresh");
client().performRequest(new Request("POST", "/_refresh"));

// check that the indices still exist but are empty
response = client().performRequest("get", "_cat/indices");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString(indexName));
assertThat(responseAsString, containsString(indexName + "-001"));
assertThat(responseAsString, containsString(indexName + "-002"));
String indicesAfterDelete = EntityUtils.toString(client().performRequest(new Request("GET", "/_cat/indices")).getEntity());
assertThat(indicesAfterDelete, containsString(indexName));
assertThat(indicesAfterDelete, containsString(indexName + "-001"));
assertThat(indicesAfterDelete, containsString(indexName + "-002"));

response = client().performRequest("get", indexName + "/_count");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":0"));

response = client().performRequest("get", indexName + "-001/_count");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":0"));

response = client().performRequest("get", indexName + "-002/_count");
assertEquals(200, response.getStatusLine().getStatusCode());
responseAsString = responseEntityToString(response);
assertThat(responseAsString, containsString("\"count\":0"));
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "/_count")).getEntity()),
containsString("\"count\":0"));
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-001/_count")).getEntity()),
containsString("\"count\":0"));
assertThat(EntityUtils.toString(client().performRequest(new Request("GET", indexName+ "-002/_count")).getEntity()),
containsString("\"count\":0"));

expectThrows(ResponseException.class, () ->
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")));
}

public void testDelete_multipleRequest() throws Exception {
@ -590,7 +502,7 @@ public class MlJobIT extends ESRestTestCase {
if (forceDelete) {
url += "?force=true";
}
Response response = client().performRequest("delete", url);
Response response = client().performRequest(new Request("DELETE", url));
responses.put(Thread.currentThread().getId(), response);
} catch (ResponseException re) {
responseExceptions.put(Thread.currentThread().getId(), re);
@ -640,11 +552,12 @@ public class MlJobIT extends ESRestTestCase {
}

for (Response response : responses.values()) {
assertEquals(responseEntityToString(response), 200, response.getStatusLine().getStatusCode());
assertEquals(EntityUtils.toString(response.getEntity()), 200, response.getStatusLine().getStatusCode());
}

assertNotNull(recreationResponse.get());
assertEquals(responseEntityToString(recreationResponse.get()), 200, recreationResponse.get().getStatusLine().getStatusCode());
assertEquals(EntityUtils.toString(recreationResponse.get().getEntity()),
200, recreationResponse.get().getStatusLine().getStatusCode());

if (recreationException.get() != null) {
assertNull(recreationException.get().getMessage(), recreationException.get());
@ -656,7 +569,7 @@ public class MlJobIT extends ESRestTestCase {
// but in the case that it does not the job that is recreated may get deleted.
// It is not a error if the job does not exist but the following assertions
// will fail in that case.
client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId);
client().performRequest(new Request("GET", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId));

// Check that the job aliases exist. These are the last thing to be deleted when a job is deleted, so
// if there's been a race between deletion and recreation these are what will be missing.
@ -682,15 +595,8 @@ public class MlJobIT extends ESRestTestCase {
}

private String getAliases() throws IOException {
Response response = client().performRequest("get", "_aliases");
assertEquals(200, response.getStatusLine().getStatusCode());
return responseEntityToString(response);
}

private static String responseEntityToString(Response response) throws IOException {
try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
return reader.lines().collect(Collectors.joining("\n"));
}
Response response = client().performRequest(new Request("GET", "/_aliases"));
return EntityUtils.toString(response.getEntity());
}

@After
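For context on the pattern applied throughout the hunks above: the old multi-argument performRequest(method, endpoint, params, entity) calls are replaced with explicit Request objects, and the hand-rolled responseEntityToString helper gives way to EntityUtils.toString. A minimal sketch of the new idiom, assuming a test extending ESRestTestCase; the index name here is illustrative, not taken from the test:

Request countRequest = new Request("GET", "/some-index/_count");    // hypothetical index name
Response countResponse = client().performRequest(countRequest);
assertEquals(200, countResponse.getStatusLine().getStatusCode());
String body = EntityUtils.toString(countResponse.getEntity());      // replaces responseEntityToString(response)
assertThat(body, containsString("\"count\":0"));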
@ -5,9 +5,8 @@
 */
package org.elasticsearch.xpack.ml.transforms;

import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
@ -18,7 +17,6 @@ import org.elasticsearch.xpack.ml.utils.DomainSplitFunction;
import org.joda.time.DateTime;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
@ -185,9 +183,10 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
.put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 0);

createIndex("painless", settings.build());
client().performRequest("PUT", "painless/test/1", Collections.emptyMap(),
new StringEntity("{\"test\": \"test\"}", ContentType.APPLICATION_JSON));
client().performRequest("POST", "painless/_refresh");
Request createDoc = new Request("PUT", "/painless/test/1");
createDoc.setJsonEntity("{\"test\": \"test\"}");
createDoc.addParameter("refresh", "true");
client().performRequest(createDoc);

Pattern pattern = Pattern.compile("domain_split\":\\[(.*?),(.*?)\\]");

@ -198,7 +197,9 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
String mapAsJson = Strings.toString(jsonBuilder().map(params));
logger.info("params={}", mapAsJson);

StringEntity body = new StringEntity("{\n" +
Request searchRequest = new Request("GET", "/painless/test/_search");
searchRequest.setJsonEntity(
"{\n" +
" \"query\" : {\n" +
" \"match_all\": {}\n" +
" },\n" +
@ -212,10 +213,8 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
" }\n" +
" }\n" +
" }\n" +
"}", ContentType.APPLICATION_JSON);

Response response = client().performRequest("GET", "painless/test/_search", Collections.emptyMap(), body);
String responseBody = EntityUtils.toString(response.getEntity());
"}");
String responseBody = EntityUtils.toString(client().performRequest(searchRequest).getEntity());
Matcher m = pattern.matcher(responseBody);

String actualSubDomain = "";
@ -242,24 +241,23 @@ public class PainlessDomainSplitIT extends ESRestTestCase {

@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32966")
public void testHRDSplit() throws Exception {

// Create job
String job = "{\n" +
" \"description\":\"Domain splitting\",\n" +
" \"analysis_config\" : {\n" +
" \"bucket_span\":\"3600s\",\n" +
" \"detectors\" :[{\"function\":\"count\", \"by_field_name\" : \"domain_split\"}]\n" +
" },\n" +
" \"data_description\" : {\n" +
" \"field_delimiter\":\",\",\n" +
" \"time_field\":\"time\"\n" +
" \n" +
" }\n" +
" }";

client().performRequest("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job", Collections.emptyMap(),
new StringEntity(job, ContentType.APPLICATION_JSON));
client().performRequest("POST", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/_open");
Request createJobRequest = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job");
createJobRequest.setJsonEntity(
"{\n" +
" \"description\":\"Domain splitting\",\n" +
" \"analysis_config\" : {\n" +
" \"bucket_span\":\"3600s\",\n" +
" \"detectors\" :[{\"function\":\"count\", \"by_field_name\" : \"domain_split\"}]\n" +
" },\n" +
" \"data_description\" : {\n" +
" \"field_delimiter\":\",\",\n" +
" \"time_field\":\"time\"\n" +
" \n" +
" }\n" +
"}");
client().performRequest(createJobRequest);
client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/_open"));

// Create index to hold data
Settings.Builder settings = Settings.builder()
@ -284,44 +282,43 @@ public class PainlessDomainSplitIT extends ESRestTestCase {
if (i == 64) {
// Anomaly has 100 docs, but we don't care about the value
for (int j = 0; j < 100; j++) {
client().performRequest("PUT", "painless/test/" + time.toDateTimeISO() + "_" + j,
Collections.emptyMap(),
new StringEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO()
+ "\"}", ContentType.APPLICATION_JSON));
Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO() + "_" + j);
createDocRequest.setJsonEntity("{\"domain\": \"" + "bar.bar.com\", \"time\": \"" + time.toDateTimeISO() + "\"}");
client().performRequest(createDocRequest);
}
} else {
// Non-anomalous values will be what's seen when the anomaly is reported
client().performRequest("PUT", "painless/test/" + time.toDateTimeISO(),
Collections.emptyMap(),
new StringEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO()
+ "\"}", ContentType.APPLICATION_JSON));
Request createDocRequest = new Request("PUT", "/painless/test/" + time.toDateTimeISO());
createDocRequest.setJsonEntity("{\"domain\": \"" + test.hostName + "\", \"time\": \"" + time.toDateTimeISO() + "\"}");
client().performRequest(createDocRequest);
}
}

client().performRequest("POST", "painless/_refresh");
client().performRequest(new Request("POST", "/painless/_refresh"));

// Create and start datafeed
String body = "{\n" +
" \"job_id\":\"hrd-split-job\",\n" +
" \"indexes\":[\"painless\"],\n" +
" \"types\":[\"test\"],\n" +
" \"script_fields\": {\n" +
" \"domain_split\": {\n" +
" \"script\": \"return domainSplit(doc['domain'].value, params);\"\n" +
" }\n" +
" }\n" +
" }";
Request createFeedRequest = new Request("PUT", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed");
createFeedRequest.setJsonEntity(
"{\n" +
" \"job_id\":\"hrd-split-job\",\n" +
" \"indexes\":[\"painless\"],\n" +
" \"types\":[\"test\"],\n" +
" \"script_fields\": {\n" +
" \"domain_split\": {\n" +
" \"script\": \"return domainSplit(doc['domain'].value, params);\"\n" +
" }\n" +
" }\n" +
"}");

client().performRequest("PUT", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed", Collections.emptyMap(),
new StringEntity(body, ContentType.APPLICATION_JSON));
client().performRequest("POST", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed/_start");
client().performRequest(createFeedRequest);
client().performRequest(new Request("POST", MachineLearning.BASE_PATH + "datafeeds/hrd-split-datafeed/_start"));

boolean passed = awaitBusy(() -> {
try {
client().performRequest("POST", "/_refresh");
client().performRequest(new Request("POST", "/_refresh"));

Response response = client().performRequest("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/results/records");
Response response = client().performRequest(new Request("GET",
MachineLearning.BASE_PATH + "anomaly_detectors/hrd-split-job/results/records"));
String responseBody = EntityUtils.toString(response.getEntity());

if (responseBody.contains("\"count\":2")) {
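The same conversion also moves query parameters off the old Collections maps and onto the Request itself. A short sketch of the document-indexing idiom used in this file, reusing the index and body shown in the hunk above purely for illustration:

Request createDoc = new Request("PUT", "/painless/test/1");
createDoc.setJsonEntity("{\"test\": \"test\"}");      // sets the body and the application/json content type
createDoc.addParameter("refresh", "true");            // replaces Collections.singletonMap("refresh", "true")
client().performRequest(createDoc);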
@ -120,8 +120,10 @@ public class MonitoringIT extends ESSingleNodeTestCase {

// REST is the realistic way that these operations happen, so it's the most realistic way to integration test it too
// Use Monitoring Bulk API to index 3 documents
//final Response bulkResponse = getRestClient().performRequest("POST", "/_xpack/monitoring/_bulk",
// parameters, createBulkEntity());
//final Request bulkRequest = new Request("POST", "/_xpack/monitoring/_bulk");
//<<add all parameters>
//bulkRequest.setJsonEntity(createBulkEntity());
//final Response bulkResponse = getRestClient().performRequest(request);

final MonitoringBulkResponse bulkResponse =
new MonitoringBulkRequestBuilder(client())

@ -418,7 +418,7 @@ class IndicesAndAliasesResolver {

private RemoteClusterResolver(Settings settings, ClusterSettings clusterSettings) {
super(settings);
clusters = new CopyOnWriteArraySet<>(buildRemoteClustersSeeds(settings).keySet());
clusters = new CopyOnWriteArraySet<>(buildRemoteClustersDynamicConfig(settings).keySet());
listenForUpdates(clusterSettings);
}

@ -428,7 +428,7 @@ class IndicesAndAliasesResolver {
}

@Override
protected void updateRemoteCluster(String clusterAlias, List<String> addresses) {
protected void updateRemoteCluster(String clusterAlias, List<String> addresses, String proxyAddress) {
if (addresses.isEmpty()) {
clusters.remove(clusterAlias);
} else {
@ -35,6 +35,9 @@ import java.util.Arrays;
import java.util.List;
import java.util.Objects;

/**
 * Request to close Machine Learning Jobs
 */
public class CloseJobRequest extends ActionRequest implements ToXContentObject {

public static final ParseField JOB_ID = new ParseField("job_id");
@ -98,49 +101,44 @@ public class CloseJobRequest extends ActionRequest implements ToXContentObject {
return jobIds;
}

/**
 * How long to wait for the close request to complete before timing out.
 *
 * Default: 30 minutes
 */
public TimeValue getTimeout() {
return timeout;
}

/**
 * {@link CloseJobRequest#getTimeout()}
 * How long to wait for the close request to complete before timing out.
 *
 * @param timeout Default value: 30 minutes
 */
public void setTimeout(TimeValue timeout) {
this.timeout = timeout;
}

public Boolean isForce() {
return force;
}

/**
 * Should the closing be forced.
 *
 * Use to close a failed job, or to forcefully close a job which has not responded to its initial close request.
 */
public Boolean isForce() {
return force;
}

/**
 * {@link CloseJobRequest#isForce()}
 *
 * @param force When {@code true} forcefully close the job. Defaults to {@code false}
 */
public void setForce(boolean force) {
this.force = force;
}

public Boolean isAllowNoJobs() {
return this.allowNoJobs;
}

/**
 * Whether to ignore if a wildcard expression matches no jobs.
 *
 * This includes `_all` string or when no jobs have been specified
 */
public Boolean isAllowNoJobs() {
return this.allowNoJobs;
}

/**
 * {@link CloseJobRequest#isAllowNoJobs()}
 *
 * @param allowNoJobs When {@code true} ignore if wildcard or `_all` matches no jobs. Defaults to {@code true}
 */
public void setAllowNoJobs(boolean allowNoJobs) {
this.allowNoJobs = allowNoJobs;

@ -20,7 +20,7 @@ package org.elasticsearch.protocol.xpack.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@ -28,22 +28,22 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;

/**
 * Response indicating if the Job(s) closed or not
 */
public class CloseJobResponse extends ActionResponse implements ToXContentObject {

private static final ParseField CLOSED = new ParseField("closed");

public static final ObjectParser<CloseJobResponse, Void> PARSER =
new ObjectParser<>("close_job_response", true, CloseJobResponse::new);
public static final ConstructingObjectParser<CloseJobResponse, Void> PARSER =
new ConstructingObjectParser<>("close_job_response", true, (a) -> new CloseJobResponse((Boolean)a[0]));

static {
PARSER.declareBoolean(CloseJobResponse::setClosed, CLOSED);
PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), CLOSED);
}

private boolean closed;

CloseJobResponse() {
}

public CloseJobResponse(boolean closed) {
this.closed = closed;
}
@ -52,14 +52,14 @@ public class CloseJobResponse extends ActionResponse implements ToXContentObject
return PARSER.parse(parser, null);
}

/**
 * Has the job closed or not
 * @return boolean value indicating the job closed status
 */
public boolean isClosed() {
return closed;
}

public void setClosed(boolean closed) {
this.closed = closed;
}

@Override
public boolean equals(Object other) {
if (this == other) {
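As a reading aid for the parser change above: switching from ObjectParser to ConstructingObjectParser makes the closed flag a constructor argument, so the public setter can be dropped and the response becomes effectively immutable once parsed. A hedged usage sketch, assuming parser is an XContentParser positioned on a body such as {"closed":true}:

CloseJobResponse parsed = CloseJobResponse.PARSER.parse(parser, null);   // parser is an assumed XContentParser
assertTrue(parsed.isClosed());                                           // the value arrived via the constructor arg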
@ -23,6 +23,9 @@ import org.elasticsearch.action.ActionRequestValidationException;

import java.util.Objects;

/**
 * Request to delete a Machine Learning Job via its ID
 */
public class DeleteJobRequest extends ActionRequest {

private String jobId;
@ -36,6 +39,10 @@ public class DeleteJobRequest extends ActionRequest {
return jobId;
}

/**
 * The jobId which to delete
 * @param jobId unique jobId to delete, must not be null
 */
public void setJobId(String jobId) {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
}
@ -44,6 +51,12 @@ public class DeleteJobRequest extends ActionRequest {
return force;
}

/**
 * Used to forcefully delete an opened job.
 * This method is quicker than closing and deleting the job.
 *
 * @param force When {@code true} forcefully delete an opened job. Defaults to {@code false}
 */
public void setForce(boolean force) {
this.force = force;
}

@ -24,6 +24,9 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;

/**
 * Response acknowledging the Machine Learning Job request
 */
public class DeleteJobResponse extends AcknowledgedResponse {

public DeleteJobResponse(boolean acknowledged) {

@ -87,20 +87,15 @@ public class GetJobRequest extends ActionRequest implements ToXContentObject {
return jobIds;
}

/**
 * See {@link GetJobRequest#isAllowNoJobs()}
 * @param allowNoJobs
 * Whether to ignore if a wildcard expression matches no jobs.
 *
 * @param allowNoJobs If this is {@code false}, then an error is returned when a wildcard (or `_all`) does not match any jobs
 */
public void setAllowNoJobs(boolean allowNoJobs) {
this.allowNoJobs = allowNoJobs;
}

/**
 * Whether to ignore if a wildcard expression matches no jobs.
 *
 * If this is `false`, then an error is returned when a wildcard (or `_all`) does not match any jobs
 */
public Boolean isAllowNoJobs() {
return allowNoJobs;
}

@ -33,6 +33,9 @@ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Objects;

/**
 * Request to open a Machine Learning Job
 */
public class OpenJobRequest extends ActionRequest implements ToXContentObject {

public static final ParseField TIMEOUT = new ParseField("timeout");
@ -51,6 +54,11 @@ public class OpenJobRequest extends ActionRequest implements ToXContentObject {
private String jobId;
private TimeValue timeout;

/**
 * Create a new request with the desired jobId
 *
 * @param jobId unique jobId, must not be null
 */
public OpenJobRequest(String jobId) {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
}
@ -59,6 +67,11 @@ public class OpenJobRequest extends ActionRequest implements ToXContentObject {
return jobId;
}

/**
 * The jobId to open
 *
 * @param jobId unique jobId, must not be null
 */
public void setJobId(String jobId) {
this.jobId = Objects.requireNonNull(jobId, "[job_id] must not be null");
}
@ -67,6 +80,11 @@ public class OpenJobRequest extends ActionRequest implements ToXContentObject {
return timeout;
}

/**
 * How long to wait for job to open before timing out the request
 *
 * @param timeout default value of 30 minutes
 */
public void setTimeout(TimeValue timeout) {
this.timeout = timeout;
}

@ -20,7 +20,7 @@ package org.elasticsearch.protocol.xpack.ml;

import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@ -28,22 +28,23 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.Objects;

/**
 * Response indicating if the Machine Learning Job is now opened or not
 */
public class OpenJobResponse extends ActionResponse implements ToXContentObject {

private static final ParseField OPENED = new ParseField("opened");

public static final ObjectParser<OpenJobResponse, Void> PARSER = new ObjectParser<>("open_job_response", true, OpenJobResponse::new);
public static final ConstructingObjectParser<OpenJobResponse, Void> PARSER =
new ConstructingObjectParser<>("open_job_response", true, (a) -> new OpenJobResponse((Boolean)a[0]));

static {
PARSER.declareBoolean(OpenJobResponse::setOpened, OPENED);
PARSER.declareBoolean(ConstructingObjectParser.constructorArg(), OPENED);
}

private boolean opened;

OpenJobResponse() {
}

public OpenJobResponse(boolean opened) {
OpenJobResponse(boolean opened) {
this.opened = opened;
}

@ -51,14 +52,15 @@ public class OpenJobResponse extends ActionResponse implements ToXContentObject
return PARSER.parse(parser, null);
}

/**
 * Has the job opened or not
 *
 * @return boolean value indicating the job opened status
 */
public boolean isOpened() {
return opened;
}

public void setOpened(boolean opened) {
this.opened = opened;
}

@Override
public boolean equals(Object other) {
if (this == other) {

@ -28,10 +28,18 @@ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Objects;

/**
 * Request to create a new Machine Learning Job given a {@link Job} configuration
 */
public class PutJobRequest extends ActionRequest implements ToXContentObject {

private final Job job;

/**
 * Construct a new PutJobRequest
 *
 * @param job a {@link Job} configuration to create
 */
public PutJobRequest(Job job) {
this.job = job;
}

@ -27,6 +27,9 @@ import org.elasticsearch.protocol.xpack.ml.job.config.Job;
import java.io.IOException;
import java.util.Objects;

/**
 * Response containing the newly created {@link Job}
 */
public class PutJobResponse implements ToXContentObject {

private Job job;
@ -35,7 +38,7 @@ public class PutJobResponse implements ToXContentObject {
return new PutJobResponse(Job.PARSER.parse(parser, null).build());
}

public PutJobResponse(Job job) {
PutJobResponse(Job job) {
this.job = job;
}
@ -24,8 +24,6 @@ import org.apache.http.client.utils.URLEncodedUtils;
import org.apache.http.cookie.Cookie;
import org.apache.http.cookie.CookieOrigin;
import org.apache.http.cookie.MalformedCookieException;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.impl.cookie.DefaultCookieSpec;
@ -39,6 +37,8 @@ import org.apache.http.util.CharArrayBuffer;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cli.SuppressForbidden;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
@ -85,7 +85,6 @@ import java.util.concurrent.ExecutorService;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentHelper.convertToMap;
import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
import static org.hamcrest.Matchers.contains;
@ -176,9 +175,9 @@ public class SamlAuthenticationIT extends ESRestTestCase {
 */
@Before
public void setKibanaPassword() throws IOException {
final HttpEntity json = new StringEntity("{ \"password\" : \"" + KIBANA_PASSWORD + "\" }", ContentType.APPLICATION_JSON);
final Response response = adminClient().performRequest("PUT", "/_xpack/security/user/kibana/_password", emptyMap(), json);
assertOK(response);
Request request = new Request("PUT", "/_xpack/security/user/kibana/_password");
request.setJsonEntity("{ \"password\" : \"" + KIBANA_PASSWORD + "\" }");
adminClient().performRequest(request);
}

/**
@ -188,21 +187,19 @@ public class SamlAuthenticationIT extends ESRestTestCase {
 */
@Before
public void setupRoleMapping() throws IOException {
final StringEntity json = new StringEntity(Strings // top-level
.toString(XContentBuilder.builder(XContentType.JSON.xContent())
.startObject()
.array("roles", new String[] { "kibana_user"} )
.field("enabled", true)
.startObject("rules")
Request request = new Request("PUT", "/_xpack/security/role_mapping/thor-kibana");
request.setJsonEntity(Strings.toString(XContentBuilder.builder(XContentType.JSON.xContent())
.startObject()
.array("roles", new String[] { "kibana_user"} )
.field("enabled", true)
.startObject("rules")
.startArray("all")
.startObject().startObject("field").field("username", "thor").endObject().endObject()
.startObject().startObject("field").field("realm.name", "shibboleth").endObject().endObject()
.startObject().startObject("field").field("username", "thor").endObject().endObject()
.startObject().startObject("field").field("realm.name", "shibboleth").endObject().endObject()
.endArray() // "all"
.endObject() // "rules"
.endObject()), ContentType.APPLICATION_JSON);

final Response response = adminClient().performRequest("PUT", "/_xpack/security/role_mapping/thor-kibana", emptyMap(), json);
assertOK(response);
.endObject() // "rules"
.endObject()));
adminClient().performRequest(request);
}

/**
@ -251,10 +248,11 @@ public class SamlAuthenticationIT extends ESRestTestCase {
 * is for the expected user with the expected name and roles.
 */
private void verifyElasticsearchAccessToken(String accessToken) throws IOException {
final BasicHeader authorization = new BasicHeader("Authorization", "Bearer " + accessToken);
final Response response = client().performRequest("GET", "/_xpack/security/_authenticate", authorization);
assertOK(response);
final Map<String, Object> map = parseResponseAsMap(response.getEntity());
Request request = new Request("GET", "/_xpack/security/_authenticate");
RequestOptions.Builder options = request.getOptions().toBuilder();
options.addHeader("Authorization", "Bearer " + accessToken);
request.setOptions(options);
final Map<String, Object> map = entityAsMap(client().performRequest(request));
assertThat(map.get("username"), equalTo("thor"));
assertThat(map.get("full_name"), equalTo("Thor Odinson"));
assertSingletonList(map.get("roles"), "kibana_user");
@ -272,12 +270,11 @@ public class SamlAuthenticationIT extends ESRestTestCase {
 * can be used to get a new valid access token and refresh token.
 */
private void verifyElasticsearchRefreshToken(String refreshToken) throws IOException {
final String body = "{ \"grant_type\":\"refresh_token\", \"refresh_token\":\"" + refreshToken + "\" }";
final Response response = client().performRequest("POST", "/_xpack/security/oauth2/token",
emptyMap(), new StringEntity(body, ContentType.APPLICATION_JSON), kibanaAuth());
assertOK(response);
Request request = new Request("POST", "/_xpack/security/oauth2/token");
request.setJsonEntity("{ \"grant_type\":\"refresh_token\", \"refresh_token\":\"" + refreshToken + "\" }");
kibanaAuth(request);

final Map<String, Object> result = parseResponseAsMap(response.getEntity());
final Map<String, Object> result = entityAsMap(client().performRequest(request));
final Object newRefreshToken = result.get("refresh_token");
assertThat(newRefreshToken, notNullValue());
assertThat(newRefreshToken, instanceOf(String.class));
@ -463,10 +460,10 @@ public class SamlAuthenticationIT extends ESRestTestCase {
 * sends a redirect to that page.
 */
private void httpLogin(HttpExchange http) throws IOException {
final Response prepare = client().performRequest("POST", "/_xpack/security/saml/prepare",
emptyMap(), new StringEntity("{}", ContentType.APPLICATION_JSON), kibanaAuth());
assertOK(prepare);
final Map<String, Object> body = parseResponseAsMap(prepare.getEntity());
Request request = new Request("POST", "/_xpack/security/saml/prepare");
request.setJsonEntity("{}");
kibanaAuth(request);
final Map<String, Object> body = entityAsMap(client().performRequest(request));
logger.info("Created SAML authentication request {}", body);
http.getResponseHeaders().add("Set-Cookie", REQUEST_ID_COOKIE + "=" + body.get("id"));
http.getResponseHeaders().add("Location", (String) body.get("redirect"));
@ -504,9 +501,10 @@ public class SamlAuthenticationIT extends ESRestTestCase {
final String id = getCookie(REQUEST_ID_COOKIE, http);
assertThat(id, notNullValue());

final String body = "{ \"content\" : \"" + saml + "\", \"ids\": [\"" + id + "\"] }";
return client().performRequest("POST", "/_xpack/security/saml/authenticate",
emptyMap(), new StringEntity(body, ContentType.APPLICATION_JSON), kibanaAuth());
Request request = new Request("POST", "/_xpack/security/saml/authenticate");
request.setJsonEntity("{ \"content\" : \"" + saml + "\", \"ids\": [\"" + id + "\"] }");
kibanaAuth(request);
return client().performRequest(request);
}

private List<NameValuePair> parseRequestForm(HttpExchange http) throws IOException {
@ -542,9 +540,11 @@ public class SamlAuthenticationIT extends ESRestTestCase {
assertThat(((List<?>) value), contains(expectedElement));
}

private static BasicHeader kibanaAuth() {
final String auth = UsernamePasswordToken.basicAuthHeaderValue("kibana", new SecureString(KIBANA_PASSWORD.toCharArray()));
return new BasicHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, auth);
private static void kibanaAuth(Request request) {
RequestOptions.Builder options = request.getOptions().toBuilder();
options.addHeader("Authorization",
UsernamePasswordToken.basicAuthHeaderValue("kibana", new SecureString(KIBANA_PASSWORD.toCharArray())));
request.setOptions(options);
}

private CloseableHttpClient getHttpClient() throws Exception {
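The header-carrying calls in this file and the next one follow the same per-request options pattern as the new kibanaAuth(Request) helper above. A minimal sketch, assuming accessToken is already in scope:

Request authRequest = new Request("GET", "/_xpack/security/_authenticate");
RequestOptions.Builder options = authRequest.getOptions().toBuilder();
options.addHeader("Authorization", "Bearer " + accessToken);   // accessToken is an assumed variable
authRequest.setOptions(options);                               // headers now travel with the Request itself
Response authResponse = client().performRequest(authRequest);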
@ -5,8 +5,9 @@
 */
package org.elasticsearch.xpack.security.authc.esnative.tool;

import org.apache.http.message.BasicHeader;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
@ -52,7 +53,7 @@ public class SetupPasswordToolIT extends ESRestTestCase {
final Path configPath = PathUtils.get(testConfigDir);
setSystemPropsForTool(configPath);

Response nodesResponse = client().performRequest("GET", "/_nodes/http");
Response nodesResponse = client().performRequest(new Request("GET", "/_nodes/http"));
Map<String, Object> nodesMap = entityAsMap(nodesResponse);

Map<String,Object> nodes = (Map<String,Object>) nodesMap.get("nodes");
@ -102,10 +103,11 @@ public class SetupPasswordToolIT extends ESRestTestCase {
final String basicHeader = "Basic " +
Base64.getEncoder().encodeToString((entry.getKey() + ":" + entry.getValue()).getBytes(StandardCharsets.UTF_8));
try {
Response authenticateResponse = client().performRequest("GET", "/_xpack/security/_authenticate",
new BasicHeader("Authorization", basicHeader));
assertEquals(200, authenticateResponse.getStatusLine().getStatusCode());
Map<String, Object> userInfoMap = entityAsMap(authenticateResponse);
Request request = new Request("GET", "/_xpack/security/_authenticate");
RequestOptions.Builder options = request.getOptions().toBuilder();
options.addHeader("Authorization", basicHeader);
request.setOptions(options);
Map<String, Object> userInfoMap = entityAsMap(client().performRequest(request));
assertEquals(entry.getKey(), userInfoMap.get("username"));
} catch (IOException e) {
throw new UncheckedIOException(e);