Merge branch 'master' into pr/15724-gce-network-host-master

commit e9339a1960
@@ -166,6 +166,7 @@ subprojects {
"org.elasticsearch.client:rest:${version}": ':client:rest',
|
||||
"org.elasticsearch.client:sniffer:${version}": ':client:sniffer',
|
||||
"org.elasticsearch.client:test:${version}": ':client:test',
|
||||
"org.elasticsearch.client:transport:${version}": ':client:transport',
|
||||
"org.elasticsearch.test:framework:${version}": ':test:framework',
|
||||
"org.elasticsearch.distribution.integ-test-zip:elasticsearch:${version}": ':distribution:integ-test-zip',
|
||||
"org.elasticsearch.distribution.zip:elasticsearch:${version}": ':distribution:zip',
|
||||
|
|
|
@@ -0,0 +1,170 @@
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.sniff;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonFactory;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.JsonToken;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Class responsible for sniffing the http hosts from elasticsearch through the nodes info api and returning them.
|
||||
* Compatible with elasticsearch 5.x and 2.x.
|
||||
*/
|
||||
public final class ElasticsearchHostsSniffer implements HostsSniffer {
|
||||
|
||||
private static final Log logger = LogFactory.getLog(ElasticsearchHostsSniffer.class);
|
||||
|
||||
public static final long DEFAULT_SNIFF_REQUEST_TIMEOUT = TimeUnit.SECONDS.toMillis(1);
|
||||
|
||||
private final RestClient restClient;
|
||||
private final Map<String, String> sniffRequestParams;
|
||||
private final Scheme scheme;
|
||||
private final JsonFactory jsonFactory = new JsonFactory();
|
||||
|
||||
/**
|
||||
* Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts,
|
||||
* through the nodes info api, the default sniff request timeout value {@link #DEFAULT_SNIFF_REQUEST_TIMEOUT} and http
|
||||
* as the scheme for all the hosts.
|
||||
* @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance
|
||||
* that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same
|
||||
* client that was used to fetch them.
|
||||
*/
|
||||
public ElasticsearchHostsSniffer(RestClient restClient) {
|
||||
this(restClient, DEFAULT_SNIFF_REQUEST_TIMEOUT, ElasticsearchHostsSniffer.Scheme.HTTP);
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new instance of the Elasticsearch sniffer. It will use the provided {@link RestClient} to fetch the hosts
|
||||
* through the nodes info api, the provided sniff request timeout value and scheme.
|
||||
* @param restClient client used to fetch the hosts from elasticsearch through nodes info api. Usually the same instance
|
||||
* that is also provided to {@link Sniffer#builder(RestClient)}, so that the hosts are set to the same
|
||||
* client that was used to sniff them.
|
||||
* @param sniffRequestTimeoutMillis the sniff request timeout (in milliseconds) to be passed in as a query string parameter
|
||||
* to elasticsearch. Allows the request to complete without failing, as only the nodes
|
||||
* that have responded within this timeout will be returned.
|
||||
* @param scheme the scheme to associate sniffed nodes with (as it is not returned by elasticsearch)
|
||||
*/
|
||||
public ElasticsearchHostsSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) {
|
||||
this.restClient = Objects.requireNonNull(restClient, "restClient cannot be null");
|
||||
if (sniffRequestTimeoutMillis <= 0) {
|
||||
throw new IllegalArgumentException("sniffRequestTimeoutMillis must be greater than 0");
|
||||
}
|
||||
this.sniffRequestParams = Collections.<String, String>singletonMap("timeout", sniffRequestTimeoutMillis + "ms");
|
||||
this.scheme = Objects.requireNonNull(scheme, "scheme cannot be null");
|
||||
}
|
||||
|
||||
/**
|
||||
* Calls the elasticsearch nodes info api, parses the response and returns all the found http hosts
|
||||
*/
|
||||
public List<HttpHost> sniffHosts() throws IOException {
|
||||
Response response = restClient.performRequest("get", "/_nodes/http", sniffRequestParams);
|
||||
return readHosts(response.getEntity());
|
||||
}
|
||||
|
||||
private List<HttpHost> readHosts(HttpEntity entity) throws IOException {
|
||||
try (InputStream inputStream = entity.getContent()) {
|
||||
JsonParser parser = jsonFactory.createParser(inputStream);
|
||||
if (parser.nextToken() != JsonToken.START_OBJECT) {
|
||||
throw new IOException("expected data to start with an object");
|
||||
}
|
||||
List<HttpHost> hosts = new ArrayList<>();
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
|
||||
if ("nodes".equals(parser.getCurrentName())) {
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
JsonToken token = parser.nextToken();
|
||||
assert token == JsonToken.START_OBJECT;
|
||||
String nodeId = parser.getCurrentName();
|
||||
HttpHost sniffedHost = readHost(nodeId, parser, this.scheme);
|
||||
if (sniffedHost != null) {
|
||||
logger.trace("adding node [" + nodeId + "]");
|
||||
hosts.add(sniffedHost);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
}
|
||||
return hosts;
|
||||
}
|
||||
}
|
||||
|
||||
private static HttpHost readHost(String nodeId, JsonParser parser, Scheme scheme) throws IOException {
|
||||
HttpHost httpHost = null;
|
||||
String fieldName = null;
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
if (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
|
||||
fieldName = parser.getCurrentName();
|
||||
} else if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
|
||||
if ("http".equals(fieldName)) {
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
if (parser.getCurrentToken() == JsonToken.VALUE_STRING && "publish_address".equals(parser.getCurrentName())) {
|
||||
URI boundAddressAsURI = URI.create(scheme + "://" + parser.getValueAsString());
|
||||
httpHost = new HttpHost(boundAddressAsURI.getHost(), boundAddressAsURI.getPort(),
|
||||
boundAddressAsURI.getScheme());
|
||||
} else if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
}
|
||||
//http section is not present if http is not enabled on the node, ignore such nodes
|
||||
if (httpHost == null) {
|
||||
logger.debug("skipping node [" + nodeId + "] with http disabled");
|
||||
return null;
|
||||
}
|
||||
return httpHost;
|
||||
}
|
||||
|
||||
public enum Scheme {
|
||||
HTTP("http"), HTTPS("https");
|
||||
|
||||
private final String name;
|
||||
|
||||
Scheme(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
}
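
The new sniffer can also be used on its own, outside of a Sniffer. A minimal sketch, assuming a locally running node; the host, port and timeout below are placeholders and exception handling is elided.

try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
    ElasticsearchHostsSniffer hostsSniffer = new ElasticsearchHostsSniffer(
            restClient, TimeUnit.SECONDS.toMillis(5), ElasticsearchHostsSniffer.Scheme.HTTP);
    // calls GET /_nodes/http, passing the timeout as a query string parameter
    List<HttpHost> sniffedHosts = hostsSniffer.sniffHosts();
    restClient.setHosts(sniffedHosts.toArray(new HttpHost[sniffedHosts.size()]));
}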
@@ -19,175 +19,17 @@
|
||||
package org.elasticsearch.client.sniff;
|
||||
|
||||
import com.fasterxml.jackson.core.JsonFactory;
|
||||
import com.fasterxml.jackson.core.JsonParser;
|
||||
import com.fasterxml.jackson.core.JsonToken;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.http.HttpEntity;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.elasticsearch.client.Response;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.net.URI;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Class responsible for sniffing the http hosts from elasticsearch through the nodes info api and returning them back.
|
||||
* Compatible with elasticsearch 5.x and 2.x.
|
||||
* Responsible for sniffing the http hosts
|
||||
*/
|
||||
public class HostsSniffer {
|
||||
|
||||
private static final Log logger = LogFactory.getLog(HostsSniffer.class);
|
||||
|
||||
private final RestClient restClient;
|
||||
private final Map<String, String> sniffRequestParams;
|
||||
private final Scheme scheme;
|
||||
private final JsonFactory jsonFactory = new JsonFactory();
|
||||
|
||||
protected HostsSniffer(RestClient restClient, long sniffRequestTimeoutMillis, Scheme scheme) {
|
||||
this.restClient = restClient;
|
||||
this.sniffRequestParams = Collections.<String, String>singletonMap("timeout", sniffRequestTimeoutMillis + "ms");
|
||||
this.scheme = scheme;
|
||||
}
|
||||
|
||||
public interface HostsSniffer {
|
||||
/**
|
||||
* Calls the elasticsearch nodes info api, parses the response and returns all the found http hosts
|
||||
* Returns the sniffed http hosts
|
||||
*/
|
||||
public List<HttpHost> sniffHosts() throws IOException {
|
||||
Response response = restClient.performRequest("get", "/_nodes/http", sniffRequestParams);
|
||||
return readHosts(response.getEntity());
|
||||
}
|
||||
|
||||
private List<HttpHost> readHosts(HttpEntity entity) throws IOException {
|
||||
try (InputStream inputStream = entity.getContent()) {
|
||||
JsonParser parser = jsonFactory.createParser(inputStream);
|
||||
if (parser.nextToken() != JsonToken.START_OBJECT) {
|
||||
throw new IOException("expected data to start with an object");
|
||||
}
|
||||
List<HttpHost> hosts = new ArrayList<>();
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
|
||||
if ("nodes".equals(parser.getCurrentName())) {
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
JsonToken token = parser.nextToken();
|
||||
assert token == JsonToken.START_OBJECT;
|
||||
String nodeId = parser.getCurrentName();
|
||||
HttpHost sniffedHost = readHost(nodeId, parser, this.scheme);
|
||||
if (sniffedHost != null) {
|
||||
logger.trace("adding node [" + nodeId + "]");
|
||||
hosts.add(sniffedHost);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
}
|
||||
return hosts;
|
||||
}
|
||||
}
|
||||
|
||||
private static HttpHost readHost(String nodeId, JsonParser parser, Scheme scheme) throws IOException {
|
||||
HttpHost httpHost = null;
|
||||
String fieldName = null;
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
if (parser.getCurrentToken() == JsonToken.FIELD_NAME) {
|
||||
fieldName = parser.getCurrentName();
|
||||
} else if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
|
||||
if ("http".equals(fieldName)) {
|
||||
while (parser.nextToken() != JsonToken.END_OBJECT) {
|
||||
if (parser.getCurrentToken() == JsonToken.VALUE_STRING && "publish_address".equals(parser.getCurrentName())) {
|
||||
URI boundAddressAsURI = URI.create(scheme + "://" + parser.getValueAsString());
|
||||
httpHost = new HttpHost(boundAddressAsURI.getHost(), boundAddressAsURI.getPort(),
|
||||
boundAddressAsURI.getScheme());
|
||||
} else if (parser.getCurrentToken() == JsonToken.START_OBJECT) {
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
parser.skipChildren();
|
||||
}
|
||||
}
|
||||
}
|
||||
//http section is not present if http is not enabled on the node, ignore such nodes
|
||||
if (httpHost == null) {
|
||||
logger.debug("skipping node [" + nodeId + "] with http disabled");
|
||||
return null;
|
||||
}
|
||||
return httpHost;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link Builder} to help with {@link HostsSniffer} creation.
|
||||
*/
|
||||
public static Builder builder(RestClient restClient) {
|
||||
return new Builder(restClient);
|
||||
}
|
||||
|
||||
public enum Scheme {
|
||||
HTTP("http"), HTTPS("https");
|
||||
|
||||
private final String name;
|
||||
|
||||
Scheme(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return name;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* HostsSniffer builder. Helps creating a new {@link HostsSniffer}.
|
||||
*/
|
||||
public static class Builder {
|
||||
public static final long DEFAULT_SNIFF_REQUEST_TIMEOUT = TimeUnit.SECONDS.toMillis(1);
|
||||
|
||||
private final RestClient restClient;
|
||||
private long sniffRequestTimeoutMillis = DEFAULT_SNIFF_REQUEST_TIMEOUT;
|
||||
private Scheme scheme = Scheme.HTTP;
|
||||
|
||||
private Builder(RestClient restClient) {
|
||||
Objects.requireNonNull(restClient, "restClient cannot be null");
|
||||
this.restClient = restClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the sniff request timeout (in milliseconds) to be passed in as a query string parameter to elasticsearch.
|
||||
* Allows to halt the request without any failure, as only the nodes that have responded within this timeout will be returned.
|
||||
*/
|
||||
public Builder setSniffRequestTimeoutMillis(int sniffRequestTimeoutMillis) {
|
||||
if (sniffRequestTimeoutMillis <= 0) {
|
||||
throw new IllegalArgumentException("sniffRequestTimeoutMillis must be greater than 0");
|
||||
}
|
||||
this.sniffRequestTimeoutMillis = sniffRequestTimeoutMillis;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the scheme to associate sniffed nodes with (as it is not returned by elasticsearch)
|
||||
*/
|
||||
public Builder setScheme(Scheme scheme) {
|
||||
Objects.requireNonNull(scheme, "scheme cannot be null");
|
||||
this.scheme = scheme;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates a new {@link HostsSniffer} instance given the provided configuration
|
||||
*/
|
||||
public HostsSniffer build() {
|
||||
return new HostsSniffer(restClient, sniffRequestTimeoutMillis, scheme);
|
||||
}
|
||||
}
|
||||
List<HttpHost> sniffHosts() throws IOException;
|
||||
}
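
With HostsSniffer reduced to a single-method interface, alternative host sources become easy to plug in. An illustrative sketch only; the class name and the fixed host list are made up:

class StaticHostsSniffer implements HostsSniffer {
    private final List<HttpHost> hosts;

    StaticHostsSniffer(List<HttpHost> hosts) {
        this.hosts = hosts;
    }

    @Override
    public List<HttpHost> sniffHosts() throws IOException {
        // no request is made; the configured hosts are returned as-is
        return Collections.unmodifiableList(hosts);
    }
}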
@@ -28,7 +28,6 @@ import org.elasticsearch.client.RestClientBuilder;
import java.io.Closeable;
|
||||
import java.io.IOException;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.Executors;
|
||||
import java.util.concurrent.ScheduledExecutorService;
|
||||
import java.util.concurrent.ScheduledFuture;
|
||||
|
@@ -36,12 +35,12 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
|
||||
|
||||
/**
|
||||
* Class responsible for sniffing nodes from an elasticsearch cluster and setting them to a provided instance of {@link RestClient}.
|
||||
* Must be created via {@link Builder}, which allows to set all of the different options or rely on defaults.
|
||||
* Class responsible for sniffing nodes from some source (default is elasticsearch itself) and setting them to a provided instance of
|
||||
* {@link RestClient}. Must be created via {@link SnifferBuilder}, which allows setting all of the different options or relying on defaults.
|
||||
* A background task fetches the nodes through the {@link HostsSniffer} and sets them to the {@link RestClient} instance.
|
||||
* It is possible to perform sniffing on failure by creating a {@link SniffOnFailureListener} and providing it as an argument to
|
||||
* {@link RestClientBuilder#setFailureListener(RestClient.FailureListener)}. The Sniffer implementation
|
||||
* needs to be lazily set to the previously created SniffOnFailureListener through {@link SniffOnFailureListener#setSniffer(Sniffer)}.
|
||||
* {@link RestClientBuilder#setFailureListener(RestClient.FailureListener)}. The Sniffer implementation needs to be lazily set to the
|
||||
* previously created SniffOnFailureListener through {@link SniffOnFailureListener#setSniffer(Sniffer)}.
|
||||
*/
|
||||
public final class Sniffer implements Closeable {
|
||||
|
||||
|
@@ -49,7 +48,7 @@ public final class Sniffer implements Closeable {
|
||||
private final Task task;
|
||||
|
||||
private Sniffer(RestClient restClient, HostsSniffer hostsSniffer, long sniffInterval, long sniffAfterFailureDelay) {
|
||||
Sniffer(RestClient restClient, HostsSniffer hostsSniffer, long sniffInterval, long sniffAfterFailureDelay) {
|
||||
this.task = new Task(hostsSniffer, restClient, sniffInterval, sniffAfterFailureDelay);
|
||||
}
|
||||
|
||||
|
@@ -144,64 +143,12 @@ public final class Sniffer implements Closeable {
}
|
||||
|
||||
/**
|
||||
* Returns a new {@link Builder} to help with {@link Sniffer} creation.
|
||||
* Returns a new {@link SnifferBuilder} to help with {@link Sniffer} creation.
|
||||
*
|
||||
* @param restClient the client that gets its hosts set (via {@link RestClient#setHosts(HttpHost...)}) once they are fetched
|
||||
* @return a new instance of {@link SnifferBuilder}
|
||||
*/
|
||||
public static Builder builder(RestClient restClient, HostsSniffer hostsSniffer) {
|
||||
return new Builder(restClient, hostsSniffer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Sniffer builder. Helps creating a new {@link Sniffer}.
|
||||
*/
|
||||
public static final class Builder {
|
||||
public static final long DEFAULT_SNIFF_INTERVAL = TimeUnit.MINUTES.toMillis(5);
|
||||
public static final long DEFAULT_SNIFF_AFTER_FAILURE_DELAY = TimeUnit.MINUTES.toMillis(1);
|
||||
|
||||
private final RestClient restClient;
|
||||
private final HostsSniffer hostsSniffer;
|
||||
private long sniffIntervalMillis = DEFAULT_SNIFF_INTERVAL;
|
||||
private long sniffAfterFailureDelayMillis = DEFAULT_SNIFF_AFTER_FAILURE_DELAY;
|
||||
|
||||
/**
|
||||
* Creates a new builder instance by providing the {@link RestClient} that will be used to communicate with elasticsearch,
|
||||
* and the
|
||||
*/
|
||||
private Builder(RestClient restClient, HostsSniffer hostsSniffer) {
|
||||
Objects.requireNonNull(restClient, "restClient cannot be null");
|
||||
this.restClient = restClient;
|
||||
Objects.requireNonNull(hostsSniffer, "hostsSniffer cannot be null");
|
||||
this.hostsSniffer = hostsSniffer;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the interval between consecutive ordinary sniff executions in milliseconds. Will be honoured when
|
||||
* sniffOnFailure is disabled or when there are no failures between consecutive sniff executions.
|
||||
* @throws IllegalArgumentException if sniffIntervalMillis is not greater than 0
|
||||
*/
|
||||
public Builder setSniffIntervalMillis(int sniffIntervalMillis) {
|
||||
if (sniffIntervalMillis <= 0) {
|
||||
throw new IllegalArgumentException("sniffIntervalMillis must be greater than 0");
|
||||
}
|
||||
this.sniffIntervalMillis = sniffIntervalMillis;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the delay of a sniff execution scheduled after a failure (in milliseconds)
|
||||
*/
|
||||
public Builder setSniffAfterFailureDelayMillis(int sniffAfterFailureDelayMillis) {
|
||||
if (sniffAfterFailureDelayMillis <= 0) {
|
||||
throw new IllegalArgumentException("sniffAfterFailureDelayMillis must be greater than 0");
|
||||
}
|
||||
this.sniffAfterFailureDelayMillis = sniffAfterFailureDelayMillis;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the {@link Sniffer} based on the provided configuration.
|
||||
*/
|
||||
public Sniffer build() {
|
||||
return new Sniffer(restClient, hostsSniffer, sniffIntervalMillis, sniffAfterFailureDelayMillis);
|
||||
}
|
||||
public static SnifferBuilder builder(RestClient restClient) {
|
||||
return new SnifferBuilder(restClient);
|
||||
}
|
||||
}
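
The sniff-on-failure wiring described in the class javadoc ends up looking roughly like this. A sketch only: it assumes SniffOnFailureListener keeps a no-argument constructor and is accepted by RestClientBuilder#setFailureListener, and the host is a placeholder.

SniffOnFailureListener failureListener = new SniffOnFailureListener();
RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200))
        .setFailureListener(failureListener)
        .build();
// defaults to an ElasticsearchHostsSniffer when no HostsSniffer is provided
Sniffer sniffer = Sniffer.builder(restClient).build();
failureListener.setSniffer(sniffer); // set lazily, once the Sniffer exists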
@@ -0,0 +1,91 @@
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.sniff;
|
||||
|
||||
import org.elasticsearch.client.RestClient;
|
||||
|
||||
import java.util.Objects;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
/**
|
||||
* Sniffer builder. Helps create a new {@link Sniffer}.
|
||||
*/
|
||||
public final class SnifferBuilder {
|
||||
public static final long DEFAULT_SNIFF_INTERVAL = TimeUnit.MINUTES.toMillis(5);
|
||||
public static final long DEFAULT_SNIFF_AFTER_FAILURE_DELAY = TimeUnit.MINUTES.toMillis(1);
|
||||
|
||||
private final RestClient restClient;
|
||||
private long sniffIntervalMillis = DEFAULT_SNIFF_INTERVAL;
|
||||
private long sniffAfterFailureDelayMillis = DEFAULT_SNIFF_AFTER_FAILURE_DELAY;
|
||||
private HostsSniffer hostsSniffer;
|
||||
|
||||
/**
|
||||
* Creates a new builder instance by providing the {@link RestClient} that will be used to communicate with elasticsearch
|
||||
*/
|
||||
SnifferBuilder(RestClient restClient) {
|
||||
Objects.requireNonNull(restClient, "restClient cannot be null");
|
||||
this.restClient = restClient;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the interval between consecutive ordinary sniff executions in milliseconds. Will be honoured when
|
||||
* sniffOnFailure is disabled or when there are no failures between consecutive sniff executions.
|
||||
* @throws IllegalArgumentException if sniffIntervalMillis is not greater than 0
|
||||
*/
|
||||
public SnifferBuilder setSniffIntervalMillis(int sniffIntervalMillis) {
|
||||
if (sniffIntervalMillis <= 0) {
|
||||
throw new IllegalArgumentException("sniffIntervalMillis must be greater than 0");
|
||||
}
|
||||
this.sniffIntervalMillis = sniffIntervalMillis;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the delay of a sniff execution scheduled after a failure (in milliseconds)
|
||||
*/
|
||||
public SnifferBuilder setSniffAfterFailureDelayMillis(int sniffAfterFailureDelayMillis) {
|
||||
if (sniffAfterFailureDelayMillis <= 0) {
|
||||
throw new IllegalArgumentException("sniffAfterFailureDelayMillis must be greater than 0");
|
||||
}
|
||||
this.sniffAfterFailureDelayMillis = sniffAfterFailureDelayMillis;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the {@link HostsSniffer} to be used to read hosts. A default instance of {@link ElasticsearchHostsSniffer}
|
||||
* is created when not provided. This method can be used to change the configuration of the {@link ElasticsearchHostsSniffer},
|
||||
* or to provide a different implementation (e.g. in case hosts need to be taken from a different source).
|
||||
*/
|
||||
public SnifferBuilder setHostsSniffer(HostsSniffer hostsSniffer) {
|
||||
Objects.requireNonNull(hostsSniffer, "hostsSniffer cannot be null");
|
||||
this.hostsSniffer = hostsSniffer;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Creates the {@link Sniffer} based on the provided configuration.
|
||||
*/
|
||||
public Sniffer build() {
|
||||
if (hostsSniffer == null) {
|
||||
this.hostsSniffer = new ElasticsearchHostsSniffer(restClient);
|
||||
}
|
||||
return new Sniffer(restClient, hostsSniffer, sniffIntervalMillis, sniffAfterFailureDelayMillis);
|
||||
}
|
||||
}
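
Putting the builder options together, a customized Sniffer might be created as follows. A sketch under the assumption that a RestClient named restClient already exists; the interval values and the https scheme are illustrative only.

try (Sniffer sniffer = Sniffer.builder(restClient)
        .setSniffIntervalMillis(60000)          // sniff every minute instead of every five
        .setSniffAfterFailureDelayMillis(30000) // retry sooner after a failure
        .setHostsSniffer(new ElasticsearchHostsSniffer(restClient,
                ElasticsearchHostsSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT,
                ElasticsearchHostsSniffer.Scheme.HTTPS))
        .build()) {
    // the background sniffing task runs until the Sniffer is closed
}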
@@ -60,17 +60,17 @@ import static org.junit.Assert.fail;
|
||||
//animal-sniffer doesn't like our usage of com.sun.net.httpserver.* classes
|
||||
@IgnoreJRERequirement
|
||||
public class HostsSnifferTests extends RestClientTestCase {
|
||||
public class ElasticsearchHostsSnifferTests extends RestClientTestCase {
|
||||
|
||||
private int sniffRequestTimeout;
|
||||
private HostsSniffer.Scheme scheme;
|
||||
private ElasticsearchHostsSniffer.Scheme scheme;
|
||||
private SniffResponse sniffResponse;
|
||||
private HttpServer httpServer;
|
||||
|
||||
@Before
|
||||
public void startHttpServer() throws IOException {
|
||||
this.sniffRequestTimeout = RandomInts.randomIntBetween(getRandom(), 1000, 10000);
|
||||
this.scheme = RandomPicks.randomFrom(getRandom(), HostsSniffer.Scheme.values());
|
||||
this.scheme = RandomPicks.randomFrom(getRandom(), ElasticsearchHostsSniffer.Scheme.values());
|
||||
if (rarely()) {
|
||||
this.sniffResponse = SniffResponse.buildFailure();
|
||||
} else {
|
||||
|
@@ -85,14 +85,35 @@ public class HostsSnifferTests extends RestClientTestCase {
httpServer.stop(0);
|
||||
}
|
||||
|
||||
public void testConstructorValidation() throws IOException {
|
||||
try {
|
||||
new ElasticsearchHostsSniffer(null, 1, ElasticsearchHostsSniffer.Scheme.HTTP);
|
||||
fail("should have failed");
|
||||
} catch(NullPointerException e) {
|
||||
assertEquals("restClient cannot be null", e.getMessage());
|
||||
}
|
||||
HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort());
|
||||
try (RestClient restClient = RestClient.builder(httpHost).build()) {
|
||||
try {
|
||||
new ElasticsearchHostsSniffer(restClient, 1, null);
|
||||
fail("should have failed");
|
||||
} catch (NullPointerException e) {
|
||||
assertEquals(e.getMessage(), "scheme cannot be null");
|
||||
}
|
||||
try {
|
||||
new ElasticsearchHostsSniffer(restClient, RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0),
|
||||
ElasticsearchHostsSniffer.Scheme.HTTP);
|
||||
fail("should have failed");
|
||||
} catch (IllegalArgumentException e) {
|
||||
assertEquals(e.getMessage(), "sniffRequestTimeoutMillis must be greater than 0");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void testSniffNodes() throws IOException {
|
||||
HttpHost httpHost = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort());
|
||||
try (RestClient restClient = RestClient.builder(httpHost).build()) {
|
||||
HostsSniffer.Builder builder = HostsSniffer.builder(restClient).setSniffRequestTimeoutMillis(sniffRequestTimeout);
|
||||
if (scheme != HostsSniffer.Scheme.HTTP || randomBoolean()) {
|
||||
builder.setScheme(scheme);
|
||||
}
|
||||
HostsSniffer sniffer = builder.build();
|
||||
ElasticsearchHostsSniffer sniffer = new ElasticsearchHostsSniffer(restClient, sniffRequestTimeout, scheme);
|
||||
try {
|
||||
List<HttpHost> sniffedHosts = sniffer.sniffHosts();
|
||||
if (sniffResponse.isFailure) {
|
||||
|
@@ -153,7 +174,7 @@ public class HostsSnifferTests extends RestClientTestCase {
}
|
||||
}
|
||||
|
||||
private static SniffResponse buildSniffResponse(HostsSniffer.Scheme scheme) throws IOException {
|
||||
private static SniffResponse buildSniffResponse(ElasticsearchHostsSniffer.Scheme scheme) throws IOException {
|
||||
int numNodes = RandomInts.randomIntBetween(getRandom(), 1, 5);
|
||||
List<HttpHost> hosts = new ArrayList<>(numNodes);
|
||||
JsonFactory jsonFactory = new JsonFactory();
|
|
@@ -1,73 +0,0 @@
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.client.sniff;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomInts;
|
||||
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
|
||||
import org.apache.http.HttpHost;
|
||||
import org.elasticsearch.client.RestClient;
|
||||
import org.elasticsearch.client.RestClientTestCase;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
public class HostsSnifferBuilderTests extends RestClientTestCase {
|
||||
|
||||
public void testBuild() throws Exception {
|
||||
try {
|
||||
HostsSniffer.builder(null);
|
||||
fail("should have failed");
|
||||
} catch(NullPointerException e) {
|
||||
assertEquals(e.getMessage(), "restClient cannot be null");
|
||||
}
|
||||
|
||||
int numNodes = RandomInts.randomIntBetween(getRandom(), 1, 5);
|
||||
HttpHost[] hosts = new HttpHost[numNodes];
|
||||
for (int i = 0; i < numNodes; i++) {
|
||||
hosts[i] = new HttpHost("localhost", 9200 + i);
|
||||
}
|
||||
|
||||
try (RestClient client = RestClient.builder(hosts).build()) {
|
||||
try {
|
||||
HostsSniffer.builder(client).setScheme(null);
|
||||
fail("should have failed");
|
||||
} catch(NullPointerException e) {
|
||||
assertEquals(e.getMessage(), "scheme cannot be null");
|
||||
}
|
||||
|
||||
try {
|
||||
HostsSniffer.builder(client).setSniffRequestTimeoutMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0));
|
||||
fail("should have failed");
|
||||
} catch(IllegalArgumentException e) {
|
||||
assertEquals(e.getMessage(), "sniffRequestTimeoutMillis must be greater than 0");
|
||||
}
|
||||
|
||||
HostsSniffer.Builder builder = HostsSniffer.builder(client);
|
||||
if (getRandom().nextBoolean()) {
|
||||
builder.setScheme(RandomPicks.randomFrom(getRandom(), HostsSniffer.Scheme.values()));
|
||||
}
|
||||
if (getRandom().nextBoolean()) {
|
||||
builder.setSniffRequestTimeoutMillis(RandomInts.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE));
|
||||
}
|
||||
assertNotNull(builder.build());
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -22,18 +22,15 @@ package org.elasticsearch.client.sniff;
import org.apache.http.HttpHost;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
class MockHostsSniffer extends HostsSniffer {
|
||||
MockHostsSniffer() {
|
||||
super(null, -1, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Mock implementation of {@link HostsSniffer}. Useful to prevent any connection attempt while testing builders etc.
|
||||
*/
|
||||
class MockHostsSniffer implements HostsSniffer {
|
||||
@Override
|
||||
public List<HttpHost> sniffHosts() throws IOException {
|
||||
List<HttpHost> hosts = new ArrayList<>();
|
||||
hosts.add(new HttpHost("localhost", 9200));
|
||||
return hosts;
|
||||
return Collections.singletonList(new HttpHost("localhost", 9200));
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -46,7 +46,7 @@ public class SniffOnFailureListenerTests extends RestClientTestCase {
}
|
||||
|
||||
try (RestClient restClient = RestClient.builder(new HttpHost("localhost", 9200)).build()) {
|
||||
try (Sniffer sniffer = Sniffer.builder(restClient, new MockHostsSniffer()).build()) {
|
||||
try (Sniffer sniffer = Sniffer.builder(restClient).setHostsSniffer(new MockHostsSniffer()).build()) {
|
||||
listener.setSniffer(sniffer);
|
||||
try {
|
||||
listener.setSniffer(sniffer);
|
||||
|
|
|
@@ -37,50 +37,52 @@ public class SnifferBuilderTests extends RestClientTestCase {
hosts[i] = new HttpHost("localhost", 9200 + i);
|
||||
}
|
||||
|
||||
HostsSniffer hostsSniffer = new MockHostsSniffer();
|
||||
|
||||
try (RestClient client = RestClient.builder(hosts).build()) {
|
||||
try {
|
||||
Sniffer.builder(null, hostsSniffer).build();
|
||||
Sniffer.builder(null).build();
|
||||
fail("should have failed");
|
||||
} catch(NullPointerException e) {
|
||||
assertEquals("restClient cannot be null", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
Sniffer.builder(client, null).build();
|
||||
fail("should have failed");
|
||||
} catch(NullPointerException e) {
|
||||
assertEquals("hostsSniffer cannot be null", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
Sniffer.builder(client, hostsSniffer)
|
||||
.setSniffIntervalMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0));
|
||||
Sniffer.builder(client).setSniffIntervalMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0));
|
||||
fail("should have failed");
|
||||
} catch(IllegalArgumentException e) {
|
||||
assertEquals("sniffIntervalMillis must be greater than 0", e.getMessage());
|
||||
}
|
||||
|
||||
try {
|
||||
Sniffer.builder(client, hostsSniffer)
|
||||
.setSniffAfterFailureDelayMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0));
|
||||
Sniffer.builder(client).setSniffAfterFailureDelayMillis(RandomInts.randomIntBetween(getRandom(), Integer.MIN_VALUE, 0));
|
||||
fail("should have failed");
|
||||
} catch(IllegalArgumentException e) {
|
||||
assertEquals("sniffAfterFailureDelayMillis must be greater than 0", e.getMessage());
|
||||
}
|
||||
|
||||
try (Sniffer sniffer = Sniffer.builder(client, hostsSniffer).build()) {
|
||||
|
||||
try {
|
||||
Sniffer.builder(client).setHostsSniffer(null);
|
||||
fail("should have failed");
|
||||
} catch(NullPointerException e) {
|
||||
assertEquals("hostsSniffer cannot be null", e.getMessage());
|
||||
}
|
||||
|
||||
|
||||
try (Sniffer sniffer = Sniffer.builder(client).build()) {
|
||||
assertNotNull(sniffer);
|
||||
}
|
||||
|
||||
Sniffer.Builder builder = Sniffer.builder(client, hostsSniffer);
|
||||
SnifferBuilder builder = Sniffer.builder(client);
|
||||
if (getRandom().nextBoolean()) {
|
||||
builder.setSniffIntervalMillis(RandomInts.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE));
|
||||
}
|
||||
if (getRandom().nextBoolean()) {
|
||||
builder.setSniffAfterFailureDelayMillis(RandomInts.randomIntBetween(getRandom(), 1, Integer.MAX_VALUE));
|
||||
}
|
||||
if (getRandom().nextBoolean()) {
|
||||
builder.setHostsSniffer(new MockHostsSniffer());
|
||||
}
|
||||
|
||||
try (Sniffer sniffer = builder.build()) {
|
||||
assertNotNull(sniffer);
|
||||
}
|
||||
|
|
|
@@ -25,6 +25,7 @@ import org.elasticsearch.action.support.replication.ReplicationResponse;
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.xcontent.StatusToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
|
@@ -32,29 +33,83 @@ import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.rest.RestStatus;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Locale;
|
||||
|
||||
/**
|
||||
* A base class for the response of a write operation that involves a single doc
|
||||
*/
|
||||
public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContent {
|
||||
|
||||
public enum Operation implements Writeable {
|
||||
CREATE(0),
|
||||
INDEX(1),
|
||||
DELETE(2),
|
||||
NOOP(3);
|
||||
|
||||
private final byte op;
|
||||
private final String lowercase;
|
||||
|
||||
Operation(int op) {
|
||||
this.op = (byte) op;
|
||||
this.lowercase = this.toString().toLowerCase(Locale.ENGLISH);
|
||||
}
|
||||
|
||||
public byte getOp() {
|
||||
return op;
|
||||
}
|
||||
|
||||
public String getLowercase() {
|
||||
return lowercase;
|
||||
}
|
||||
|
||||
public static Operation readFrom(StreamInput in) throws IOException{
|
||||
Byte opcode = in.readByte();
|
||||
switch(opcode){
|
||||
case 0:
|
||||
return CREATE;
|
||||
case 1:
|
||||
return INDEX;
|
||||
case 2:
|
||||
return DELETE;
|
||||
case 3:
|
||||
return NOOP;
|
||||
default:
|
||||
throw new IllegalArgumentException("Unknown operation code: " + opcode);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeByte(op);
|
||||
}
|
||||
}
|
||||
|
||||
private ShardId shardId;
|
||||
private String id;
|
||||
private String type;
|
||||
private long version;
|
||||
private boolean forcedRefresh;
|
||||
protected Operation operation;
|
||||
|
||||
public DocWriteResponse(ShardId shardId, String type, String id, long version) {
|
||||
public DocWriteResponse(ShardId shardId, String type, String id, long version, Operation operation) {
|
||||
this.shardId = shardId;
|
||||
this.type = type;
|
||||
this.id = id;
|
||||
this.version = version;
|
||||
this.operation = operation;
|
||||
}
|
||||
|
||||
// needed for deserialization
|
||||
protected DocWriteResponse() {
|
||||
}
|
||||
|
||||
/**
|
||||
* The change that occurred to the document.
|
||||
*/
|
||||
public Operation getOperation() {
|
||||
return operation;
|
||||
}
|
||||
|
||||
/**
|
||||
* The index the document was changed in.
|
||||
*/
|
||||
|
@@ -143,6 +198,7 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
id = in.readString();
|
||||
version = in.readZLong();
|
||||
forcedRefresh = in.readBoolean();
|
||||
operation = Operation.readFrom(in);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -153,22 +209,17 @@ public abstract class DocWriteResponse extends ReplicationResponse implements Wr
out.writeString(id);
|
||||
out.writeZLong(version);
|
||||
out.writeBoolean(forcedRefresh);
|
||||
}
|
||||
|
||||
static final class Fields {
|
||||
static final String _INDEX = "_index";
|
||||
static final String _TYPE = "_type";
|
||||
static final String _ID = "_id";
|
||||
static final String _VERSION = "_version";
|
||||
operation.writeTo(out);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
ReplicationResponse.ShardInfo shardInfo = getShardInfo();
|
||||
builder.field(Fields._INDEX, shardId.getIndexName())
|
||||
.field(Fields._TYPE, type)
|
||||
.field(Fields._ID, id)
|
||||
.field(Fields._VERSION, version)
|
||||
builder.field("_index", shardId.getIndexName())
|
||||
.field("_type", type)
|
||||
.field("_id", id)
|
||||
.field("_version", version)
|
||||
.field("_operation", getOperation().getLowercase())
|
||||
.field("forced_refresh", forcedRefresh);
|
||||
shardInfo.toXContent(builder, params);
|
||||
return builder;
|
@@ -248,7 +248,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
BytesReference indexSourceAsBytes = indexRequest.source();
|
||||
// add the response
|
||||
IndexResponse indexResponse = result.getResponse();
|
||||
UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.isCreated());
|
||||
UpdateResponse updateResponse = new UpdateResponse(indexResponse.getShardInfo(), indexResponse.getShardId(), indexResponse.getType(), indexResponse.getId(), indexResponse.getVersion(), indexResponse.getOperation());
|
||||
if (updateRequest.fields() != null && updateRequest.fields().length > 0) {
|
||||
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(indexSourceAsBytes, true);
|
||||
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), indexResponse.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), indexSourceAsBytes));
|
||||
|
@@ -261,7 +261,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
WriteResult<DeleteResponse> writeResult = updateResult.writeResult;
|
||||
DeleteResponse response = writeResult.getResponse();
|
||||
DeleteRequest deleteRequest = updateResult.request();
|
||||
updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false);
|
||||
updateResponse = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
|
||||
updateResponse.setGetResult(updateHelper.extractGetResult(updateRequest, request.index(), response.getVersion(), updateResult.result.updatedSourceAsMap(), updateResult.result.updateSourceContentType(), null));
|
||||
// Replace the update request to the translated delete request to execute on the replica.
|
||||
item = request.items()[requestIndex] = new BulkItemRequest(request.items()[requestIndex].id(), deleteRequest);
|
||||
|
|
|
@@ -20,8 +20,6 @@
package org.elasticsearch.action.delete;
|
||||
|
||||
import org.elasticsearch.action.DocWriteResponse;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
import org.elasticsearch.index.shard.ShardId;
|
||||
import org.elasticsearch.rest.RestStatus;
|
||||
|
@@ -36,52 +34,29 @@ import java.io.IOException;
*/
|
||||
public class DeleteResponse extends DocWriteResponse {
|
||||
|
||||
private boolean found;
|
||||
|
||||
public DeleteResponse() {
|
||||
|
||||
}
|
||||
|
||||
public DeleteResponse(ShardId shardId, String type, String id, long version, boolean found) {
|
||||
super(shardId, type, id, version);
|
||||
this.found = found;
|
||||
super(shardId, type, id, version, found ? Operation.DELETE : Operation.NOOP);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Returns <tt>true</tt> if a doc was found to delete.
|
||||
*/
|
||||
public boolean isFound() {
|
||||
return found;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
found = in.readBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeBoolean(found);
|
||||
return operation == Operation.DELETE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RestStatus status() {
|
||||
if (found == false) {
|
||||
return RestStatus.NOT_FOUND;
|
||||
}
|
||||
return super.status();
|
||||
}
|
||||
|
||||
static final class Fields {
|
||||
static final String FOUND = "found";
|
||||
return isFound() ? super.status() : RestStatus.NOT_FOUND;
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.field(Fields.FOUND, isFound());
|
||||
builder.field("found", isFound());
|
||||
super.toXContent(builder, params);
|
||||
return builder;
|
||||
}
|
||||
|
@@ -94,7 +69,7 @@ public class DeleteResponse extends DocWriteResponse {
builder.append(",type=").append(getType());
|
||||
builder.append(",id=").append(getId());
|
||||
builder.append(",version=").append(getVersion());
|
||||
builder.append(",found=").append(found);
|
||||
builder.append(",operation=").append(getOperation().getLowercase());
|
||||
builder.append(",shards=").append(getShardInfo());
|
||||
return builder.append("]").toString();
|
||||
}
|
||||
|
|
|
@@ -36,42 +36,24 @@ import java.io.IOException;
*/
|
||||
public class IndexResponse extends DocWriteResponse {
|
||||
|
||||
private boolean created;
|
||||
|
||||
public IndexResponse() {
|
||||
|
||||
}
|
||||
|
||||
public IndexResponse(ShardId shardId, String type, String id, long version, boolean created) {
|
||||
super(shardId, type, id, version);
|
||||
this.created = created;
|
||||
super(shardId, type, id, version, created ? Operation.CREATE : Operation.INDEX);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the document was created, false if updated.
|
||||
*/
|
||||
public boolean isCreated() {
|
||||
return this.created;
|
||||
return this.operation == Operation.CREATE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RestStatus status() {
|
||||
if (created) {
|
||||
return RestStatus.CREATED;
|
||||
}
|
||||
return super.status();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
created = in.readBoolean();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeBoolean(created);
|
||||
return isCreated() ? RestStatus.CREATED : super.status();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@@ -82,19 +64,15 @@ public class IndexResponse extends DocWriteResponse {
builder.append(",type=").append(getType());
|
||||
builder.append(",id=").append(getId());
|
||||
builder.append(",version=").append(getVersion());
|
||||
builder.append(",created=").append(created);
|
||||
builder.append(",operation=").append(getOperation().getLowercase());
|
||||
builder.append(",shards=").append(getShardInfo());
|
||||
return builder.append("]").toString();
|
||||
}
|
||||
|
||||
static final class Fields {
|
||||
static final String CREATED = "created";
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
super.toXContent(builder, params);
|
||||
builder.field(Fields.CREATED, isCreated());
|
||||
builder.field("created", isCreated());
|
||||
return builder;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -21,26 +21,25 @@ package org.elasticsearch.action.ingest;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.elasticsearch.action.ValidateActions.addValidationError;
|
||||
|
||||
public class GetPipelineRequest extends MasterNodeReadRequest<GetPipelineRequest> {
|
||||
|
||||
private String[] ids;
|
||||
|
||||
public GetPipelineRequest(String... ids) {
|
||||
if (ids == null || ids.length == 0) {
|
||||
throw new IllegalArgumentException("No ids specified");
|
||||
if (ids == null) {
|
||||
throw new IllegalArgumentException("ids cannot be null");
|
||||
}
|
||||
this.ids = ids;
|
||||
}
|
||||
|
||||
GetPipelineRequest() {
|
||||
this.ids = Strings.EMPTY_ARRAY;
|
||||
}
|
||||
|
||||
public String[] getIds() {
|
||||
|
|
|
@@ -41,15 +41,14 @@ final class WriteableIngestDocument implements Writeable, ToXContent {
|
||||
WriteableIngestDocument(StreamInput in) throws IOException {
|
||||
Map<String, Object> sourceAndMetadata = in.readMap();
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<String, String> ingestMetadata = (Map<String, String>) in.readGenericValue();
|
||||
Map<String, Object> ingestMetadata = in.readMap();
|
||||
this.ingestDocument = new IngestDocument(sourceAndMetadata, ingestMetadata);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeMap(ingestDocument.getSourceAndMetadata());
|
||||
out.writeGenericValue(ingestDocument.getIngestMetadata());
|
||||
out.writeMap(ingestDocument.getIngestMetadata());
|
||||
}
|
||||
|
||||
IngestDocument getIngestDocument() {
|
||||
|
@@ -66,11 +65,7 @@ final class WriteableIngestDocument implements Writeable, ToXContent {
}
|
||||
}
|
||||
builder.field("_source", ingestDocument.getSourceAndMetadata());
|
||||
builder.startObject("_ingest");
|
||||
for (Map.Entry<String, String> ingestMetadata : ingestDocument.getIngestMetadata().entrySet()) {
|
||||
builder.field(ingestMetadata.getKey(), ingestMetadata.getValue());
|
||||
}
|
||||
builder.endObject();
|
||||
builder.field("_ingest", ingestDocument.getIngestMetadata());
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
|
|
@@ -22,6 +22,7 @@ package org.elasticsearch.action.update;
import org.elasticsearch.ExceptionsHelper;
|
||||
import org.elasticsearch.action.ActionListener;
|
||||
import org.elasticsearch.action.ActionRunnable;
|
||||
import org.elasticsearch.action.DocWriteResponse;
|
||||
import org.elasticsearch.action.RoutingMissingException;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||
|
@@ -185,7 +186,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
indexAction.execute(upsertRequest, new ActionListener<IndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(IndexResponse response) {
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
|
||||
if (request.fields() != null && request.fields().length > 0) {
|
||||
Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(upsertSourceBytes, true);
|
||||
update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), sourceAndContent.v2(), sourceAndContent.v1(), upsertSourceBytes));
|
||||
|
@@ -223,7 +224,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
indexAction.execute(indexRequest, new ActionListener<IndexResponse>() {
|
||||
@Override
|
||||
public void onResponse(IndexResponse response) {
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.isCreated());
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
|
||||
update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), indexSourceBytes));
|
||||
update.setForcedRefresh(response.forcedRefresh());
|
||||
listener.onResponse(update);
|
||||
|
@@ -252,7 +253,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
deleteAction.execute(deleteRequest, new ActionListener<DeleteResponse>() {
|
||||
@Override
|
||||
public void onResponse(DeleteResponse response) {
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), false);
|
||||
UpdateResponse update = new UpdateResponse(response.getShardInfo(), response.getShardId(), response.getType(), response.getId(), response.getVersion(), response.getOperation());
|
||||
update.setGetResult(updateHelper.extractGetResult(request, request.concreteIndex(), response.getVersion(), result.updatedSourceAsMap(), result.updateSourceContentType(), null));
|
||||
update.setForcedRefresh(response.forcedRefresh());
|
||||
listener.onResponse(update);
|
||||
|
|
|
@@ -116,7 +116,7 @@ public class UpdateHelper extends AbstractComponent {
request.script.getScript());
|
||||
}
|
||||
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(),
|
||||
getResult.getVersion(), false);
|
||||
getResult.getVersion(), UpdateResponse.convert(Operation.NONE));
|
||||
update.setGetResult(getResult);
|
||||
return new Result(update, Operation.NONE, upsertDoc, XContentType.JSON);
|
||||
}
|
||||
|
@@ -234,12 +234,12 @@ public class UpdateHelper extends AbstractComponent {
.setRefreshPolicy(request.getRefreshPolicy());
|
||||
return new Result(deleteRequest, Operation.DELETE, updatedSourceAsMap, updateSourceContentType);
|
||||
} else if ("none".equals(operation)) {
|
||||
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), false);
|
||||
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), UpdateResponse.convert(Operation.NONE));
|
||||
update.setGetResult(extractGetResult(request, request.index(), getResult.getVersion(), updatedSourceAsMap, updateSourceContentType, getResult.internalSourceRef()));
|
||||
return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType);
|
||||
} else {
|
||||
logger.warn("Used update operation [{}] for script [{}], doing nothing...", operation, request.script.getScript());
|
||||
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), false);
|
||||
UpdateResponse update = new UpdateResponse(shardId, getResult.getType(), getResult.getId(), getResult.getVersion(), UpdateResponse.convert(Operation.NONE));
|
||||
return new Result(update, Operation.NONE, updatedSourceAsMap, updateSourceContentType);
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -29,11 +29,8 @@ import org.elasticsearch.rest.RestStatus;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
*/
|
||||
public class UpdateResponse extends DocWriteResponse {
|
||||
|
||||
private boolean created;
|
||||
private GetResult getResult;
|
||||
|
||||
public UpdateResponse() {
|
||||
|
@@ -43,14 +40,28 @@ public class UpdateResponse extends DocWriteResponse {
* Constructor to be used when an update didn't translate into a write.
|
||||
* For example: update script with operation set to none
|
||||
*/
|
||||
public UpdateResponse(ShardId shardId, String type, String id, long version, boolean created) {
|
||||
this(new ShardInfo(0, 0), shardId, type, id, version, created);
|
||||
public UpdateResponse(ShardId shardId, String type, String id, long version, Operation operation) {
|
||||
this(new ShardInfo(0, 0), shardId, type, id, version, operation);
|
||||
}
|
||||
|
||||
public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String type, String id, long version, boolean created) {
|
||||
super(shardId, type, id, version);
|
||||
public UpdateResponse(ShardInfo shardInfo, ShardId shardId, String type, String id,
|
||||
long version, Operation operation) {
|
||||
super(shardId, type, id, version, operation);
|
||||
setShardInfo(shardInfo);
|
||||
this.created = created;
|
||||
}
|
||||
|
||||
public static Operation convert(UpdateHelper.Operation op) {
|
||||
switch(op) {
|
||||
case UPSERT:
|
||||
return Operation.CREATE;
|
||||
case INDEX:
|
||||
return Operation.INDEX;
|
||||
case DELETE:
|
||||
return Operation.DELETE;
|
||||
case NONE:
|
||||
return Operation.NOOP;
|
||||
}
|
||||
throw new IllegalArgumentException();
|
||||
}
|
||||
|
||||
public void setGetResult(GetResult getResult) {
|
||||
|
@@ -65,22 +76,17 @@ public class UpdateResponse extends DocWriteResponse {
* Returns true if document was created due to an UPSERT operation
|
||||
*/
|
||||
public boolean isCreated() {
|
||||
return this.created;
|
||||
|
||||
return this.operation == Operation.CREATE;
|
||||
}
|
||||
|
||||
@Override
|
||||
public RestStatus status() {
|
||||
if (created) {
|
||||
return RestStatus.CREATED;
|
||||
}
|
||||
return super.status();
|
||||
return isCreated() ? RestStatus.CREATED : super.status();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void readFrom(StreamInput in) throws IOException {
|
||||
super.readFrom(in);
|
||||
created = in.readBoolean();
|
||||
if (in.readBoolean()) {
|
||||
getResult = GetResult.readGetResult(in);
|
||||
}
|
||||
|
@ -89,7 +95,6 @@ public class UpdateResponse extends DocWriteResponse {
|
|||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
super.writeTo(out);
|
||||
out.writeBoolean(created);
|
||||
if (getResult == null) {
|
||||
out.writeBoolean(false);
|
||||
} else {
|
||||
|
@ -122,7 +127,7 @@ public class UpdateResponse extends DocWriteResponse {
|
|||
builder.append(",type=").append(getType());
|
||||
builder.append(",id=").append(getId());
|
||||
builder.append(",version=").append(getVersion());
|
||||
builder.append(",created=").append(created);
|
||||
builder.append(",operation=").append(getOperation().getLowercase());
|
||||
builder.append(",shards=").append(getShardInfo());
|
||||
return builder.append("]").toString();
|
||||
}
|
||||
|
|
|
@@ -202,8 +202,10 @@ public class MetaDataCreateIndexService extends AbstractComponent {
if (response.isAcknowledged()) {
activeShardsObserver.waitForActiveShards(request.index(), request.waitForActiveShards(), request.ackTimeout(),
shardsAcked -> {
-logger.debug("[{}] index created, but the operation timed out while waiting for " +
-"enough shards to be started.", request.index());
+if (shardsAcked == false) {
+logger.debug("[{}] index created, but the operation timed out while waiting for " +
+"enough shards to be started.", request.index());
+}
listener.onResponse(new CreateIndexClusterStateUpdateResponse(response.isAcknowledged(), shardsAcked));
}, listener::onFailure);
} else {

@@ -58,6 +58,9 @@ public class NetworkExceptionHelper {
if (e.getMessage().equals("Socket is closed")) {
return true;
}
+if (e.getMessage().equals("Socket closed")) {
+return true;
+}
}
return false;
}

@@ -73,9 +73,6 @@ public class QueryParseContext implements ParseFieldMatcherSupplier {
}
}
}
-if (queryBuilder == null) {
-throw new ParsingException(parser.getTokenLocation(), "Required query is missing");
-}
return queryBuilder;
} catch (ParsingException e) {
throw e;

@@ -113,7 +110,7 @@ public class QueryParseContext implements ParseFieldMatcherSupplier {
// move to the next START_OBJECT
token = parser.nextToken();
if (token != XContentParser.Token.START_OBJECT && token != XContentParser.Token.START_ARRAY) {
-throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no field after start_object");
+throw new ParsingException(parser.getTokenLocation(), "[_na] query malformed, no start_object after query name");
}
@SuppressWarnings("unchecked")
Optional<QueryBuilder> result = (Optional<QueryBuilder>) indicesQueriesRegistry.lookup(queryName, parseFieldMatcher,

@@ -61,7 +61,7 @@ class Checkpoint {
Channels.writeToChannel(buffer, channel);
}

-private void write(DataOutput out) throws IOException {
+void write(DataOutput out) throws IOException {
out.writeLong(offset);
out.writeInt(numOps);
out.writeLong(generation);

@@ -113,7 +113,9 @@ public class TranslogReader extends BaseTranslogReader implements Closeable {
headerStream.read(ref.bytes, ref.offset, ref.length);
BytesRef uuidBytes = new BytesRef(translogUUID);
if (uuidBytes.bytesEquals(ref) == false) {
-throw new TranslogCorruptedException("expected shard UUID [" + uuidBytes + "] but got: [" + ref + "] this translog file belongs to a different translog. path:" + path);
+throw new TranslogCorruptedException("expected shard UUID " + uuidBytes + "/" + uuidBytes.utf8ToString() +
+" but got: " + ref + "/" + ref.utf8ToString() +
+" this translog file belongs to a different translog. path:" + path);
}
return new TranslogReader(checkpoint.generation, channel, path, ref.length + CodecUtil.headerLength(TranslogWriter.TRANSLOG_CODEC) + Integer.BYTES, checkpoint.offset, checkpoint.numOps);
default:

@@ -0,0 +1,56 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.translog;

import org.elasticsearch.cli.MultiCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.logging.LogConfigurator;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.translog.TruncateTranslogCommand;
import org.elasticsearch.node.internal.InternalSettingsPreparer;

/**
* Class encapsulating and dispatching commands from the {@code elasticsearch-translog} command line tool
*/
public class TranslogToolCli extends MultiCommand {

public TranslogToolCli() {
super("A CLI tool for various Elasticsearch translog actions");
subcommands.put("truncate", new TruncateTranslogCommand());
}

public static void main(String[] args) throws Exception {
// initialize default for es.logger.level because we will not read the logging.yml
String loggerLevel = System.getProperty("es.logger.level", "INFO");
String pathHome = System.getProperty("es.path.home");
// Set the appender for all potential log files to terminal so that other components that use the logger print out the
// same terminal.
Environment loggingEnvironment = InternalSettingsPreparer.prepareEnvironment(Settings.builder()
.put("path.home", pathHome)
.put("appender.terminal.type", "terminal")
.put("rootLogger", "${logger.level}, terminal")
.put("logger.level", loggerLevel)
.build(), Terminal.DEFAULT);
LogConfigurator.configure(loggingEnvironment.settings(), false);

exit(new TranslogToolCli().main(args, Terminal.DEFAULT));
}
}

@@ -76,10 +76,16 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
return getHeaderLength(new BytesRef(translogUUID).length);
}

-private static int getHeaderLength(int uuidLength) {
+static int getHeaderLength(int uuidLength) {
return CodecUtil.headerLength(TRANSLOG_CODEC) + uuidLength + Integer.BYTES;
}

+static void writeHeader(OutputStreamDataOutput out, BytesRef ref) throws IOException {
+CodecUtil.writeHeader(out, TRANSLOG_CODEC, VERSION);
+out.writeInt(ref.length);
+out.writeBytes(ref.bytes, ref.offset, ref.length);
+}

public static TranslogWriter create(ShardId shardId, String translogUUID, long fileGeneration, Path file, ChannelFactory channelFactory, ByteSizeValue bufferSize) throws IOException {
final BytesRef ref = new BytesRef(translogUUID);
final int headerLength = getHeaderLength(ref.length);

@@ -88,9 +94,7 @@ public class TranslogWriter extends BaseTranslogReader implements Closeable {
// This OutputStreamDataOutput is intentionally not closed because
// closing it will close the FileChannel
final OutputStreamDataOutput out = new OutputStreamDataOutput(java.nio.channels.Channels.newOutputStream(channel));
-CodecUtil.writeHeader(out, TRANSLOG_CODEC, VERSION);
-out.writeInt(ref.length);
-out.writeBytes(ref.bytes, ref.offset, ref.length);
+writeHeader(out, ref);
channel.force(true);
writeCheckpoint(channelFactory, headerLength, 0, file.getParent(), fileGeneration);
final TranslogWriter writer = new TranslogWriter(channelFactory, shardId, fileGeneration, channel, file, bufferSize);

@@ -0,0 +1,224 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.translog;

import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.lucene.store.OutputStreamDataOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cli.SettingCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.translog.Checkpoint;

import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class TruncateTranslogCommand extends SettingCommand {

private final OptionSpec<String> translogFolder;
private final OptionSpec<Void> batchMode;

public TruncateTranslogCommand() {
super("Truncates a translog to create a new, empty translog");
this.translogFolder = parser.acceptsAll(Arrays.asList("d", "dir"),
"Translog Directory location on disk")
.withRequiredArg()
.required();
this.batchMode = parser.acceptsAll(Arrays.asList("b", "batch"),
"Enable batch mode explicitly, automatic confirmation of warnings");
}

// Visible for testing
public OptionParser getParser() {
return this.parser;
}

@Override
protected void printAdditionalHelp(Terminal terminal) {
terminal.println("This tool truncates the translog and translog");
terminal.println("checkpoint files to create a new translog");
}

@SuppressForbidden(reason = "Necessary to use the path passed in")
private Path getTranslogPath(OptionSet options) {
return PathUtils.get(translogFolder.value(options), "", "");
}

@Override
protected void execute(Terminal terminal, OptionSet options, Map<String, String> settings) throws Exception {
boolean batch = options.has(batchMode);

Path translogPath = getTranslogPath(options);
Path idxLocation = translogPath.getParent().resolve("index");

if (Files.exists(translogPath) == false || Files.isDirectory(translogPath) == false) {
throw new ElasticsearchException("translog directory [" + translogPath + "], must exist and be a directory");
}

if (Files.exists(idxLocation) == false || Files.isDirectory(idxLocation) == false) {
throw new ElasticsearchException("unable to find a shard at [" + idxLocation + "], which must exist and be a directory");
}

// Hold the lock open for the duration of the tool running
try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE);
Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
Set<Path> translogFiles;
try {
terminal.println("Checking existing translog files");
translogFiles = filesInDirectory(translogPath);
} catch (IOException e) {
terminal.println("encountered IOException while listing directory, aborting...");
throw new ElasticsearchException("failed to find existing translog files", e);
}

// Warn about ES being stopped and files being deleted
warnAboutDeletingFiles(terminal, translogFiles, batch);

List<IndexCommit> commits;
try {
terminal.println("Reading translog UUID information from Lucene commit from shard at [" + idxLocation + "]");
commits = DirectoryReader.listCommits(dir);
} catch (IndexNotFoundException infe) {
throw new ElasticsearchException("unable to find a valid shard at [" + idxLocation + "]", infe);
}

// Retrieve the generation and UUID from the existing data
Map<String, String> commitData = commits.get(commits.size() - 1).getUserData();
String translogGeneration = commitData.get(Translog.TRANSLOG_GENERATION_KEY);
String translogUUID = commitData.get(Translog.TRANSLOG_UUID_KEY);
if (translogGeneration == null || translogUUID == null) {
throw new ElasticsearchException("shard must have a valid translog generation and UUID but got: [{}] and: [{}]",
translogGeneration, translogUUID);
}
terminal.println("Translog Generation: " + translogGeneration);
terminal.println("Translog UUID : " + translogUUID);

Path tempEmptyCheckpoint = translogPath.resolve("temp-" + Translog.CHECKPOINT_FILE_NAME);
Path realEmptyCheckpoint = translogPath.resolve(Translog.CHECKPOINT_FILE_NAME);
Path tempEmptyTranslog = translogPath.resolve("temp-" + Translog.TRANSLOG_FILE_PREFIX +
translogGeneration + Translog.TRANSLOG_FILE_SUFFIX);
Path realEmptyTranslog = translogPath.resolve(Translog.TRANSLOG_FILE_PREFIX +
translogGeneration + Translog.TRANSLOG_FILE_SUFFIX);

// Write empty checkpoint and translog to empty files
long gen = Long.parseLong(translogGeneration);
int translogLen = writeEmptyTranslog(tempEmptyTranslog, translogUUID);
writeEmptyCheckpoint(tempEmptyCheckpoint, translogLen, gen);

terminal.println("Removing existing translog files");
IOUtils.rm(translogFiles.toArray(new Path[]{}));

terminal.println("Creating new empty checkpoint at [" + realEmptyCheckpoint + "]");
Files.move(tempEmptyCheckpoint, realEmptyCheckpoint, StandardCopyOption.ATOMIC_MOVE);
terminal.println("Creating new empty translog at [" + realEmptyTranslog + "]");
Files.move(tempEmptyTranslog, realEmptyTranslog, StandardCopyOption.ATOMIC_MOVE);

// Fsync the translog directory after rename
IOUtils.fsync(translogPath, true);

} catch (LockObtainFailedException lofe) {
throw new ElasticsearchException("Failed to lock shard's directory at [" + idxLocation + "], is Elasticsearch still running?");
}

terminal.println("Done.");
}

/** Write a checkpoint file to the given location with the given generation */
public static void writeEmptyCheckpoint(Path filename, int translogLength, long translogGeneration) throws IOException {
try (FileChannel fc = FileChannel.open(filename, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
OutputStreamDataOutput out = new OutputStreamDataOutput(Channels.newOutputStream(fc))) {
Checkpoint emptyCheckpoint = new Checkpoint(translogLength, 0, translogGeneration);
emptyCheckpoint.write(out);
fc.force(true);
}
}

/**
* Write a translog containing the given translog UUID to the given location. Returns the number of bytes written.
*/
public static int writeEmptyTranslog(Path filename, String translogUUID) throws IOException {
final BytesRef translogRef = new BytesRef(translogUUID);
try (FileChannel fc = FileChannel.open(filename, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
OutputStreamDataOutput out = new OutputStreamDataOutput(Channels.newOutputStream(fc))) {
TranslogWriter.writeHeader(out, translogRef);
fc.force(true);
}
return TranslogWriter.getHeaderLength(translogRef.length);
}

/** Show a warning about deleting files, asking for a confirmation if {@code batchMode} is false */
public static void warnAboutDeletingFiles(Terminal terminal, Set<Path> files, boolean batchMode) {
terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
terminal.println("! WARNING: Elasticsearch MUST be stopped before running this tool !");
terminal.println("! !");
terminal.println("! WARNING: Documents inside of translog files will be lost !");
terminal.println("! !");
terminal.println("! WARNING: The following files will be DELETED! !");
terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
for (Path file : files) {
terminal.println("--> " + file);
}
terminal.println("");
if (batchMode == false) {
String text = terminal.readText("Continue and DELETE files? [y/N] ");
if (!text.equalsIgnoreCase("y")) {
throw new ElasticsearchException("aborted by user");
}
}
}

/** Return a Set of all files in a given directory */
public static Set<Path> filesInDirectory(Path directory) throws IOException {
Set<Path> files = new HashSet<>();
try (DirectoryStream<Path> stream = Files.newDirectoryStream(directory)) {
for (Path file : stream) {
files.add(file);
}
}
return files;
}

}

@@ -135,14 +135,14 @@ public class CompoundProcessor implements Processor {
List<String> processorTagHeader = cause.getHeader("processor_tag");
String failedProcessorType = (processorTypeHeader != null) ? processorTypeHeader.get(0) : null;
String failedProcessorTag = (processorTagHeader != null) ? processorTagHeader.get(0) : null;
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
ingestMetadata.put(ON_FAILURE_MESSAGE_FIELD, cause.getRootCause().getMessage());
ingestMetadata.put(ON_FAILURE_PROCESSOR_TYPE_FIELD, failedProcessorType);
ingestMetadata.put(ON_FAILURE_PROCESSOR_TAG_FIELD, failedProcessorTag);
}

private void removeFailureMetadata(IngestDocument ingestDocument) {
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
ingestMetadata.remove(ON_FAILURE_MESSAGE_FIELD);
ingestMetadata.remove(ON_FAILURE_PROCESSOR_TYPE_FIELD);
ingestMetadata.remove(ON_FAILURE_PROCESSOR_TAG_FIELD);

@@ -54,7 +54,7 @@ public final class IngestDocument {
static final String TIMESTAMP = "timestamp";

private final Map<String, Object> sourceAndMetadata;
-private final Map<String, String> ingestMetadata;
+private final Map<String, Object> ingestMetadata;

public IngestDocument(String index, String type, String id, String routing, String parent, String timestamp,
String ttl, Map<String, Object> source) {

@@ -94,7 +94,7 @@ public final class IngestDocument {
* source and ingest metadata. This is needed because the ingest metadata will be initialized with the current timestamp at
* init time, which makes equality comparisons impossible in tests.
*/
-public IngestDocument(Map<String, Object> sourceAndMetadata, Map<String, String> ingestMetadata) {
+public IngestDocument(Map<String, Object> sourceAndMetadata, Map<String, Object> ingestMetadata) {
this.sourceAndMetadata = sourceAndMetadata;
this.ingestMetadata = ingestMetadata;
}

@@ -517,7 +517,7 @@ public final class IngestDocument {
* Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones.
* Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)}
*/
-public Map<String, String> getIngestMetadata() {
+public Map<String, Object> getIngestMetadata() {
return this.ingestMetadata;
}

@@ -207,6 +207,11 @@ public class PipelineStore extends AbstractComponent implements ClusterStateList
return Collections.emptyList();
}

+// if we didn't ask for _any_ ID, then we get them all (this is the same as if they ask for '*')
+if (ids.length == 0) {
+return new ArrayList<>(ingestMetadata.getPipelines().values());
+}

List<PipelineConfiguration> result = new ArrayList<>(ids.length);
for (String id : ids) {
if (Regex.isSimpleMatchPattern(id)) {

@@ -35,6 +35,7 @@ public class RestGetPipelineAction extends BaseRestHandler {
@Inject
public RestGetPipelineAction(Settings settings, RestController controller) {
super(settings);
+controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline", this);
controller.registerHandler(RestRequest.Method.GET, "/_ingest/pipeline/{id}", this);
}

@@ -121,7 +121,7 @@ public class ExceptionSerializationTests extends ESTestCase {
final Path startPath = PathUtils.get(ElasticsearchException.class.getProtectionDomain().getCodeSource().getLocation().toURI())
.resolve("org").resolve("elasticsearch");
final Set<? extends Class<?>> ignore = Sets.newHashSet(
-org.elasticsearch.test.rest.parser.RestTestParseException.class,
+org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException.class,
CancellableThreadsTests.CustomException.class,
org.elasticsearch.rest.BytesRestResponseTests.WithHeadersException.class,
AbstractClientHeadersTestCase.InternalException.class);

@@ -19,12 +19,13 @@

package org.elasticsearch.action;

+import org.elasticsearch.action.DocWriteResponse.Operation;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.test.ESTestCase;

public class DocWriteResponseTests extends ESTestCase {
public void testGetLocation() {
-DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0) {
+DocWriteResponse response = new DocWriteResponse(new ShardId("index", "uuid", 0), "type", "id", 0, Operation.CREATE) {
// DocWriteResponse is abstract so we have to sneak a subclass in here to test it.
};
assertEquals("/index/type/id", response.getLocation(null));

@@ -145,7 +145,7 @@ public class SimulateExecutionServiceTests extends ESTestCase {
assertThat(simulateDocumentVerboseResult.getProcessorResults().get(1).getIngestDocument(), not(sameInstance(ingestDocument)));

IngestDocument ingestDocumentWithOnFailureMetadata = new IngestDocument(ingestDocument);
-Map<String, String> metadata = ingestDocumentWithOnFailureMetadata.getIngestMetadata();
+Map<String, Object> metadata = ingestDocumentWithOnFailureMetadata.getIngestMetadata();
metadata.put(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD, "mock");
metadata.put(CompoundProcessor.ON_FAILURE_PROCESSOR_TAG_FIELD, "processor_0");
metadata.put(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD, "processor failed");

@@ -111,7 +111,7 @@ public class TrackingResultProcessorTests extends ESTestCase {
assertThat(resultList.get(0).getFailure(), equalTo(exception));
assertThat(resultList.get(0).getProcessorTag(), equalTo(expectedFailResult.getProcessorTag()));

-Map<String, String> metadata = resultList.get(1).getIngestDocument().getIngestMetadata();
+Map<String, Object> metadata = resultList.get(1).getIngestDocument().getIngestMetadata();
assertThat(metadata.get(ON_FAILURE_MESSAGE_FIELD), equalTo("fail"));
assertThat(metadata.get(ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("test"));
assertThat(metadata.get(ON_FAILURE_PROCESSOR_TAG_FIELD), equalTo("fail"));

@@ -47,7 +47,7 @@ public class WriteableIngestDocumentTests extends ESTestCase {
for (int i = 0; i < numFields; i++) {
sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
}
-Map<String, String> ingestMetadata = new HashMap<>();
+Map<String, Object> ingestMetadata = new HashMap<>();
numFields = randomIntBetween(1, 5);
for (int i = 0; i < numFields; i++) {
ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));

@@ -70,7 +70,7 @@ public class WriteableIngestDocumentTests extends ESTestCase {
changed = true;
}

-Map<String, String> otherIngestMetadata;
+Map<String, Object> otherIngestMetadata;
if (randomBoolean()) {
otherIngestMetadata = new HashMap<>();
numFields = randomIntBetween(1, 5);

@@ -103,7 +103,7 @@ public class WriteableIngestDocumentTests extends ESTestCase {
for (int i = 0; i < numFields; i++) {
sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
}
-Map<String, String> ingestMetadata = new HashMap<>();
+Map<String, Object> ingestMetadata = new HashMap<>();
numFields = randomIntBetween(1, 5);
for (int i = 0; i < numFields; i++) {
ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));

@@ -131,7 +131,7 @@ public class WriteableIngestDocumentTests extends ESTestCase {

Map<String, Object> toXContentDoc = (Map<String, Object>) toXContentMap.get("doc");
Map<String, Object> toXContentSource = (Map<String, Object>) toXContentDoc.get("_source");
-Map<String, String> toXContentIngestMetadata = (Map<String, String>) toXContentDoc.get("_ingest");
+Map<String, Object> toXContentIngestMetadata = (Map<String, Object>) toXContentDoc.get("_ingest");

Map<IngestDocument.MetaData, String> metadataMap = ingestDocument.extractMetadata();
for (Map.Entry<IngestDocument.MetaData, String> metadata : metadataMap.entrySet()) {

@@ -0,0 +1,128 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.query;

import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.indices.query.IndicesQueriesRegistry;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.test.ESTestCase;
import org.junit.BeforeClass;

import java.io.IOException;
import java.util.Optional;

import static java.util.Collections.emptyList;

public class QueryParseContextTests extends ESTestCase {

private static IndicesQueriesRegistry indicesQueriesRegistry;

@BeforeClass
public static void init() {
indicesQueriesRegistry = new SearchModule(Settings.EMPTY, new NamedWriteableRegistry(), false, emptyList())
.getQueryParserRegistry();
}

public void testParseTopLevelBuilder() throws IOException {
QueryBuilder query = new MatchQueryBuilder("foo", "bar");
String requestBody = "{ \"query\" : " + query.toString() + "}";
try (XContentParser parser = XContentFactory.xContent(requestBody).createParser(requestBody)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
QueryBuilder actual = context.parseTopLevelQueryBuilder();
assertEquals(query, actual);
}
}

public void testParseTopLevelBuilderEmptyObject() throws IOException {
String requestBody = "{}";
try (XContentParser parser = XContentFactory.xContent(requestBody).createParser(requestBody)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
QueryBuilder query = context.parseTopLevelQueryBuilder();
assertNull(query);
}
}

public void testParseTopLevelBuilderUnknownParameter() throws IOException {
String requestBody = "{ \"foo\" : \"bar\"}";
try (XContentParser parser = XContentFactory.xContent(requestBody).createParser(requestBody)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
ParsingException exception = expectThrows(ParsingException.class, () -> context.parseTopLevelQueryBuilder());
assertEquals("request does not support [foo]", exception.getMessage());
}
}

public void testParseInnerQueryBuilder() throws IOException {
QueryBuilder query = new MatchQueryBuilder("foo", "bar");
String source = query.toString();
try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
Optional<QueryBuilder> actual = context.parseInnerQueryBuilder();
assertEquals(query, actual.get());
}
}

public void testParseInnerQueryBuilderEmptyBody() throws IOException {
String source = "{}";
try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY);
Optional<QueryBuilder> emptyQuery = context.parseInnerQueryBuilder();
assertFalse(emptyQuery.isPresent());
}
}

public void testParseInnerQueryBuilderExceptions() throws IOException {
String source = "{ \"foo\": \"bar\" }";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) {
parser.nextToken();
parser.nextToken(); // don't start with START_OBJECT to provoke exception
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder());
assertEquals("[_na] query malformed, must start with start_object", exception.getMessage());
}

source = "{}";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> context.parseInnerQueryBuilder());
assertEquals("query malformed, empty clause found at [1:2]", exception.getMessage());
}

source = "{ \"foo\" : \"bar\" }";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder());
assertEquals("[_na] query malformed, no start_object after query name", exception.getMessage());
}

source = "{ \"foo\" : {} }";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(source)) {
QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT);
ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder());
assertEquals("no [query] registered for [foo]", exception.getMessage());
}
}

}

@@ -70,6 +70,7 @@ import org.elasticsearch.test.CorruptionUtils;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.MockIndexEventListener;
+import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.store.MockFSIndexStore;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.transport.TransportException;

@@ -471,6 +472,7 @@ public class CorruptedFileIT extends ESIntegTestCase {
* TODO once checksum verification on snapshotting is implemented this test needs to be fixed or split into several
* parts... We should also corrupt files on the actual snapshot and check that we don't restore the corrupted shard.
*/
+@TestLogging("monitor.fs:DEBUG")
public void testCorruptFileThenSnapshotAndRestore() throws ExecutionException, InterruptedException, IOException {
int numDocs = scaledRandomIntBetween(100, 1000);
internalCluster().ensureAtLeastNumDataNodes(2);

@@ -0,0 +1,256 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.index.translog;

import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.node.stats.NodesStatsResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cli.MockTerminal;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.routing.GroupShardsIterator;
import org.elasticsearch.cluster.routing.ShardIterator;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Priority;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.MockEngineFactoryPlugin;
import org.elasticsearch.index.translog.TruncateTranslogCommand;
import org.elasticsearch.monitor.fs.FsInfo;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.engine.MockEngineSupport;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.test.transport.MockTransportService;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.TimeUnit;

import static org.elasticsearch.common.util.CollectionUtils.iterableAsArrayList;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.notNullValue;

@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.SUITE, numDataNodes = 0)
public class TruncateTranslogIT extends ESIntegTestCase {

@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(MockTransportService.TestPlugin.class, MockEngineFactoryPlugin.class);
}

public void testCorruptTranslogTruncation() throws Exception {
internalCluster().startNodesAsync(1, Settings.EMPTY).get();

assertAcked(prepareCreate("test").setSettings(Settings.builder()
.put("index.number_of_shards", 1)
.put("index.number_of_replicas", 0)
.put("index.refresh_interval", "-1")
.put(MockEngineSupport.DISABLE_FLUSH_ON_CLOSE.getKey(), true) // never flush - always recover from translog
));
ensureYellow();

// Index some documents
int numDocs = scaledRandomIntBetween(100, 1000);
IndexRequestBuilder[] builders = new IndexRequestBuilder[numDocs];
for (int i = 0; i < builders.length; i++) {
builders[i] = client().prepareIndex("test", "type").setSource("foo", "bar");
}
disableTranslogFlush("test");
indexRandom(false, false, false, Arrays.asList(builders));
Set<Path> translogDirs = getTranslogDirs("test");

TruncateTranslogCommand ttc = new TruncateTranslogCommand();
MockTerminal t = new MockTerminal();
OptionParser parser = ttc.getParser();

for (Path translogDir : translogDirs) {
OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b");
// Try running it before the shard is closed, it should flip out because it can't acquire the lock
try {
logger.info("--> running truncate while index is open on [{}]", translogDir.toAbsolutePath());
ttc.execute(t, options, new HashMap<String, String>());
fail("expected the truncate command to fail not being able to acquire the lock");
} catch (Exception e) {
assertThat(e.getMessage(), containsString("Failed to lock shard's directory"));
}
}

// Corrupt the translog file(s)
logger.info("--> corrupting translog");
corruptRandomTranslogFiles("test");

// Restart the single node
logger.info("--> restarting node");
internalCluster().fullRestart();
client().admin().cluster().prepareHealth().setWaitForYellowStatus()
.setTimeout(new TimeValue(1000, TimeUnit.MILLISECONDS))
.setWaitForEvents(Priority.LANGUID)
.get();

try {
client().prepareSearch("test").setQuery(matchAllQuery()).get();
fail("all shards should be failed due to a corrupted translog");
} catch (SearchPhaseExecutionException e) {
// Good, all shards should be failed because there is only a
// single shard and its translog is corrupt
}

// Close the index so we can actually truncate the translog
logger.info("--> closing 'test' index");
client().admin().indices().prepareClose("test").get();

for (Path translogDir : translogDirs) {
final Path idxLocation = translogDir.getParent().resolve("index");
assertBusy(() -> {
logger.info("--> checking that lock has been released for {}", idxLocation);
try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE);
Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
// Great, do nothing, we just wanted to obtain the lock
} catch (LockObtainFailedException lofe) {
throw new ElasticsearchException("Still waiting for lock release at [" + idxLocation + "]");
} catch (IOException ioe) {
fail("Got an IOException: " + ioe);
}
});

OptionSet options = parser.parse("-d", translogDir.toAbsolutePath().toString(), "-b");
logger.info("--> running truncate translog command for [{}]", translogDir.toAbsolutePath());
ttc.execute(t, options, new HashMap<String, String>());
logger.info("--> output:\n{}", t.getOutput());
}

// Re-open index
logger.info("--> opening 'test' index");
client().admin().indices().prepareOpen("test").get();
ensureYellow("test");

// Run a search and make sure it succeeds
SearchResponse resp = client().prepareSearch("test").setQuery(matchAllQuery()).get();
ElasticsearchAssertions.assertNoFailures(resp);
}

private Set<Path> getTranslogDirs(String indexName) throws IOException {
ClusterState state = client().admin().cluster().prepareState().get().getState();
GroupShardsIterator shardIterators = state.getRoutingTable().activePrimaryShardsGrouped(new String[]{indexName}, false);
final Index idx = state.metaData().index(indexName).getIndex();
List<ShardIterator> iterators = iterableAsArrayList(shardIterators);
ShardIterator shardIterator = RandomPicks.randomFrom(random(), iterators);
ShardRouting shardRouting = shardIterator.nextOrNull();
assertNotNull(shardRouting);
assertTrue(shardRouting.primary());
assertTrue(shardRouting.assignedToNode());
String nodeId = shardRouting.currentNodeId();
NodesStatsResponse nodeStatses = client().admin().cluster().prepareNodesStats(nodeId).setFs(true).get();
Set<Path> translogDirs = new TreeSet<>(); // treeset makes sure iteration order is deterministic
for (FsInfo.Path fsPath : nodeStatses.getNodes().get(0).getFs()) {
String path = fsPath.getPath();
final String relativeDataLocationPath = "indices/"+ idx.getUUID() +"/" + Integer.toString(shardRouting.getId()) + "/translog";
Path translogPath = PathUtils.get(path).resolve(relativeDataLocationPath);
if (Files.isDirectory(translogPath)) {
translogDirs.add(translogPath);
}
}
return translogDirs;
}

private void corruptRandomTranslogFiles(String indexName) throws IOException {
Set<Path> translogDirs = getTranslogDirs(indexName);
Set<Path> files = new TreeSet<>(); // treeset makes sure iteration order is deterministic
for (Path translogDir : translogDirs) {
if (Files.isDirectory(translogDir)) {
logger.info("--> path: {}", translogDir);
try (DirectoryStream<Path> stream = Files.newDirectoryStream(translogDir)) {
for (Path item : stream) {
logger.info("--> File: {}", item);
if (Files.isRegularFile(item) && item.getFileName().toString().startsWith("translog-")) {
files.add(item);
}
}
}
}
}
Path fileToCorrupt = null;
if (!files.isEmpty()) {
int corruptions = randomIntBetween(5, 20);
for (int i = 0; i < corruptions; i++) {
fileToCorrupt = RandomPicks.randomFrom(random(), files);
try (FileChannel raf = FileChannel.open(fileToCorrupt, StandardOpenOption.READ, StandardOpenOption.WRITE)) {
// read
raf.position(randomIntBetween(0, (int) Math.min(Integer.MAX_VALUE, raf.size() - 1)));
long filePointer = raf.position();
ByteBuffer bb = ByteBuffer.wrap(new byte[1]);
raf.read(bb);
bb.flip();

// corrupt
byte oldValue = bb.get(0);
byte newValue = (byte) (oldValue + 1);
bb.put(0, newValue);

// rewrite
raf.position(filePointer);
raf.write(bb);
logger.info("--> corrupting file {} -- flipping at position {} from {} to {} file: {}",
fileToCorrupt, filePointer, Integer.toHexString(oldValue),
Integer.toHexString(newValue), fileToCorrupt);
}
}
}
assertThat("no file corrupted", fileToCorrupt, notNullValue());
}

/** Disables translog flushing for the specified index */
private static void disableTranslogFlush(String index) {
Settings settings = Settings.builder()
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), new ByteSizeValue(1, ByteSizeUnit.PB))
.build();
client().admin().indices().prepareUpdateSettings(index).setSettings(settings).get();
}

}

@@ -86,7 +86,7 @@ public class CompoundProcessorTests extends ESTestCase {
public void testSingleProcessorWithOnFailureProcessor() throws Exception {
TestProcessor processor1 = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
TestProcessor processor2 = new TestProcessor(ingestDocument -> {
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.size(), equalTo(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));

@@ -104,7 +104,7 @@ public class CompoundProcessorTests extends ESTestCase {
public void testSingleProcessorWithNestedFailures() throws Exception {
TestProcessor processor = new TestProcessor("id", "first", ingestDocument -> {throw new RuntimeException("error");});
TestProcessor processorToFail = new TestProcessor("id2", "second", ingestDocument -> {
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.size(), equalTo(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));

@@ -112,7 +112,7 @@ public class CompoundProcessorTests extends ESTestCase {
throw new RuntimeException("error");
});
TestProcessor lastProcessor = new TestProcessor(ingestDocument -> {
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.size(), equalTo(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("second"));

@@ -131,7 +131,7 @@ public class CompoundProcessorTests extends ESTestCase {
public void testCompoundProcessorExceptionFailWithoutOnFailure() throws Exception {
TestProcessor firstProcessor = new TestProcessor("id1", "first", ingestDocument -> {throw new RuntimeException("error");});
TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> {
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.entrySet(), hasSize(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("error"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("first"));

@@ -153,7 +153,7 @@ public class CompoundProcessorTests extends ESTestCase {
TestProcessor failProcessor =
new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");});
TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> {
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.entrySet(), hasSize(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail"));

@@ -176,7 +176,7 @@ public class CompoundProcessorTests extends ESTestCase {
TestProcessor failProcessor =
new TestProcessor("tag_fail", "fail", ingestDocument -> {throw new RuntimeException("custom error message");});
TestProcessor secondProcessor = new TestProcessor("id3", "second", ingestDocument -> {
-Map<String, String> ingestMetadata = ingestDocument.getIngestMetadata();
+Map<String, Object> ingestMetadata = ingestDocument.getIngestMetadata();
assertThat(ingestMetadata.entrySet(), hasSize(3));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_MESSAGE_FIELD), equalTo("custom error message"));
assertThat(ingestMetadata.get(CompoundProcessor.ON_FAILURE_PROCESSOR_TYPE_FIELD), equalTo("fail"));

@@ -907,7 +907,7 @@ public class IngestDocumentTests extends ESTestCase {
for (int i = 0; i < numFields; i++) {
sourceAndMetadata.put(randomFrom(IngestDocument.MetaData.values()).getFieldName(), randomAsciiOfLengthBetween(5, 10));
}
-Map<String, String> ingestMetadata = new HashMap<>();
+Map<String, Object> ingestMetadata = new HashMap<>();
numFields = randomIntBetween(1, 5);
for (int i = 0; i < numFields; i++) {
ingestMetadata.put(randomAsciiOfLengthBetween(5, 10), randomAsciiOfLengthBetween(5, 10));

@@ -930,7 +930,7 @@ public class IngestDocumentTests extends ESTestCase {
changed = true;
}

-Map<String, String> otherIngestMetadata;
+Map<String, Object> otherIngestMetadata;
if (randomBoolean()) {
otherIngestMetadata = new HashMap<>();
numFields = randomIntBetween(1, 5);

@@ -962,7 +962,7 @@ public class IngestDocumentTests extends ESTestCase {
long before = System.currentTimeMillis();
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
long after = System.currentTimeMillis();
-String timestampString = ingestDocument.getIngestMetadata().get("timestamp");
+String timestampString = (String) ingestDocument.getIngestMetadata().get("timestamp");
assertThat(timestampString, notNullValue());
assertThat(timestampString, endsWith("+0000"));
DateFormat df = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZZ", Locale.ROOT);

@@ -28,11 +28,9 @@ import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.LocalTransportAddress;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.test.ESTestCase;
import org.junit.Before;

@@ -48,7 +46,6 @@ import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;

public class PipelineStoreTests extends ESTestCase {

@@ -216,6 +213,19 @@ public class PipelineStoreTests extends ESTestCase {
assertThat(pipelines.size(), equalTo(2));
assertThat(pipelines.get(0).getId(), equalTo("_id1"));
assertThat(pipelines.get(1).getId(), equalTo("_id2"));

+// get all variants: (no IDs or '*')
+pipelines = store.innerGetPipelines(ingestMetadata);
+pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId()));
+assertThat(pipelines.size(), equalTo(2));
+assertThat(pipelines.get(0).getId(), equalTo("_id1"));
+assertThat(pipelines.get(1).getId(), equalTo("_id2"));

+pipelines = store.innerGetPipelines(ingestMetadata, "*");
+pipelines.sort((o1, o2) -> o1.getId().compareTo(o2.getId()));
+assertThat(pipelines.size(), equalTo(2));
+assertThat(pipelines.get(0).getId(), equalTo("_id1"));
+assertThat(pipelines.get(1).getId(), equalTo("_id2"));
}

public void testCrud() throws Exception {

@@ -20,18 +20,21 @@
package org.elasticsearch.test.rest;

import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-import org.elasticsearch.test.rest.parser.RestTestParseException;

+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */
-public class RestIT extends ESClientYamlSuiteTestCase {
-public RestIT(RestTestCandidate testCandidate) {
+public class DebClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+public DebClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}

@ParametersFactory
-public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
+public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
return createParameters(0, 1);
}
}

@ -0,0 +1,40 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.test.rest;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */
|
||||
public class IntegTestZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
public IntegTestZipClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return createParameters(0, 1);
|
||||
}
|
||||
}
|
|
@ -20,18 +20,21 @@
|
|||
package org.elasticsearch.test.rest;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */
|
||||
public class RestIT extends ESClientYamlSuiteTestCase {
|
||||
public RestIT(RestTestCandidate testCandidate) {
|
||||
public class RpmClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
public RpmClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return createParameters(0, 1);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,90 @@
#!/bin/bash

CDPATH=""
SCRIPT="$0"

# SCRIPT may be an arbitrarily deep series of symlinks. Loop until we have the concrete path.
while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  # Drop everything prior to ->
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done

# determine elasticsearch home
ES_HOME=`dirname "$SCRIPT"`/..

# make ELASTICSEARCH_HOME absolute
ES_HOME=`cd "$ES_HOME"; pwd`


# Sets the default values for elasticsearch variables used in this script
if [ -z "$CONF_DIR" ]; then
  CONF_DIR="${path.conf}"
fi

# The default env file is defined at building/packaging time.
# For a ${project.name} package, the value is "${path.env}".
ES_ENV_FILE="${path.env}"

# If an include is specified with the ES_INCLUDE environment variable, use it
if [ -n "$ES_INCLUDE" ]; then
  ES_ENV_FILE="$ES_INCLUDE"
fi

# Source the environment file
if [ -n "$ES_ENV_FILE" ]; then

  # If the ES_ENV_FILE is not found, try to resolve the path
  # against the ES_HOME directory
  if [ ! -f "$ES_ENV_FILE" ]; then
    ES_ENV_FILE="$ELASTIC_HOME/$ES_ENV_FILE"
  fi

  . "$ES_ENV_FILE"
  if [ $? -ne 0 ]; then
    echo "Unable to source environment file: $ES_ENV_FILE" >&2
    exit 1
  fi
fi

# don't let JAVA_TOOL_OPTIONS slip in (e.g. crazy agents in ubuntu)
# works around https://bugs.launchpad.net/ubuntu/+source/jayatana/+bug/1441487
if [ "x$JAVA_TOOL_OPTIONS" != "x" ]; then
  echo "Warning: Ignoring JAVA_TOOL_OPTIONS=$JAVA_TOOL_OPTIONS"
  unset JAVA_TOOL_OPTIONS
fi

# CONF_FILE setting was removed
if [ ! -z "$CONF_FILE" ]; then
  echo "CONF_FILE setting is no longer supported. elasticsearch.yml must be placed in the config directory and cannot be renamed."
  exit 1
fi

if [ -x "$JAVA_HOME/bin/java" ]; then
  JAVA=$JAVA_HOME/bin/java
else
  JAVA=`which java`
fi

if [ ! -x "$JAVA" ]; then
  echo "Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME"
  exit 1
fi

# full hostname passed through cut for portability on systems that do not support hostname -s
# export on separate line for shells that do not support combining definition and export
HOSTNAME=`hostname | cut -d. -f1`
export HOSTNAME

declare -a args=("$@")

if [ -e "$CONF_DIR" ]; then
  args=("${args[@]}" -Edefault.path.conf="$CONF_DIR")
fi

exec "$JAVA" $ES_JAVA_OPTS -Delasticsearch -Des.path.home="$ES_HOME" -cp "$ES_HOME/lib/*" org.elasticsearch.index.translog.TranslogToolCli "${args[@]}"
@ -0,0 +1,61 @@
|
|||
@echo off
|
||||
|
||||
SETLOCAL enabledelayedexpansion
|
||||
|
||||
IF DEFINED JAVA_HOME (
|
||||
set JAVA=%JAVA_HOME%\bin\java.exe
|
||||
) ELSE (
|
||||
FOR %%I IN (java.exe) DO set JAVA=%%~$PATH:I
|
||||
)
|
||||
IF NOT EXIST "%JAVA%" (
|
||||
ECHO Could not find any executable java binary. Please install java in your PATH or set JAVA_HOME 1>&2
|
||||
EXIT /B 1
|
||||
)
|
||||
|
||||
set SCRIPT_DIR=%~dp0
|
||||
for %%I in ("%SCRIPT_DIR%..") do set ES_HOME=%%~dpfI
|
||||
|
||||
TITLE Elasticsearch Plugin Manager ${project.version}
|
||||
|
||||
SET properties=
|
||||
SET args=
|
||||
|
||||
:loop
|
||||
SET "current=%~1"
|
||||
SHIFT
|
||||
IF "x!current!" == "x" GOTO breakloop
|
||||
|
||||
IF "!current:~0,2%!" == "-D" (
|
||||
ECHO "!current!" | FINDSTR /C:"=">nul && (
|
||||
:: current matches -D*=*
|
||||
IF "x!properties!" NEQ "x" (
|
||||
SET properties=!properties! "!current!"
|
||||
) ELSE (
|
||||
SET properties="!current!"
|
||||
)
|
||||
) || (
|
||||
:: current matches -D*
|
||||
IF "x!properties!" NEQ "x" (
|
||||
SET properties=!properties! "!current!=%~1"
|
||||
) ELSE (
|
||||
SET properties="!current!=%~1"
|
||||
)
|
||||
SHIFT
|
||||
)
|
||||
) ELSE (
|
||||
:: current matches *
|
||||
IF "x!args!" NEQ "x" (
|
||||
SET args=!args! "!current!"
|
||||
) ELSE (
|
||||
SET args="!current!"
|
||||
)
|
||||
)
|
||||
|
||||
GOTO loop
|
||||
:breakloop
|
||||
|
||||
SET HOSTNAME=%COMPUTERNAME%
|
||||
|
||||
"%JAVA%" %ES_JAVA_OPTS% -Des.path.home="%ES_HOME%" !properties! -cp "%ES_HOME%/lib/*;" "org.elasticsearch.index.translog.TranslogToolCli" !args!
|
||||
|
||||
ENDLOCAL
|
|
@ -20,18 +20,21 @@
|
|||
package org.elasticsearch.test.rest;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */
|
||||
public class RestIT extends ESClientYamlSuiteTestCase {
|
||||
public RestIT(RestTestCandidate testCandidate) {
|
||||
public class TarClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
public TarClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return createParameters(0, 1);
|
||||
}
|
||||
}
|
|
@ -1,37 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.test.rest;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */
|
||||
public class RestIT extends ESClientYamlSuiteTestCase {
|
||||
public RestIT(RestTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
return createParameters(0, 1);
|
||||
}
|
||||
}
|
|
@ -20,18 +20,21 @@
|
|||
package org.elasticsearch.test.rest;
|
||||
|
||||
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
|
||||
import org.elasticsearch.test.rest.parser.RestTestParseException;
|
||||
|
||||
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
|
||||
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
|
||||
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
/** Rest integration test. Runs against a cluster started by {@code gradle integTest} */
|
||||
public class RestIT extends ESClientYamlSuiteTestCase {
|
||||
public RestIT(RestTestCandidate testCandidate) {
|
||||
public class ZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
|
||||
public ZipClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) {
|
||||
super(testCandidate);
|
||||
}
|
||||
|
||||
@ParametersFactory
|
||||
public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
|
||||
public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
|
||||
return createParameters(0, 1);
|
||||
}
|
||||
}
|
|
@@ -63,6 +63,13 @@ buildRestTests.docs = fileTree(projectDir) {

Closure setupTwitter = { String name, int count ->
  buildRestTests.setups[name] = '''
  - do:
      indices.create:
        index: twitter
        body:
          settings:
            number_of_shards: 1
            number_of_replicas: 1
  - do:
      bulk:
        index: twitter
@@ -190,7 +190,9 @@ This balance can be controlled using a `compression` parameter:
    "load_time_outlier" : {
        "percentiles" : {
            "field" : "load_time",
-           "compression" : 200 <1>
+           "tdigest": {
+               "compression" : 200 <1>
+           }
        }
    }
}

@@ -218,11 +220,11 @@ the TDigest will use less memory.

experimental[]

https://github.com/HdrHistogram/HdrHistogram[HDR Histogram] (High Dynamic Range Histogram) is an alternative implementation
that can be useful when calculating percentiles for latency measurements as it can be faster than the t-digest implementation
with the trade-off of a larger memory footprint. This implementation maintains a fixed worst-case percentage error (specified
as a number of significant digits). This means that if data is recorded with values from 1 microsecond up to 1 hour
(3,600,000,000 microseconds) in a histogram set to 3 significant digits, it will maintain a value resolution of 1 microsecond
for values up to 1 millisecond and 3.6 seconds (or better) for the maximum tracked value (1 hour).
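
For illustration only (not part of the change above): the resolution trade-off described in the previous paragraph can be reproduced directly with the standalone HdrHistogram library linked above. The class name and the recorded values below are made-up examples, not Elasticsearch code.

[source,java]
--------------------------------------------------
import org.HdrHistogram.Histogram;

public class HdrResolutionSketch {
    public static void main(String[] args) {
        // Track values from 1 microsecond up to 1 hour (3,600,000,000 microseconds)
        // with 3 significant digits, mirroring the example above.
        Histogram histogram = new Histogram(3_600_000_000L, 3);

        // Record a few latencies, in microseconds.
        histogram.recordValue(150);          // 150 microseconds
        histogram.recordValue(2_500);        // 2.5 milliseconds
        histogram.recordValue(1_200_000);    // 1.2 seconds

        // Percentile lookups honour the configured worst-case error.
        System.out.println(histogram.getValueAtPercentile(95.0));
        System.out.println(histogram.getValueAtPercentile(99.9));
    }
}
--------------------------------------------------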

The HDR Histogram can be used by specifying the `method` parameter in the request:

@@ -235,17 +237,18 @@ The HDR Histogram can be used by specifying the `method` parameter in the request:
        "percentiles" : {
            "field" : "load_time",
            "percents" : [95, 99, 99.9],
-           "method" : "hdr", <1>
-           "number_of_significant_value_digits" : 3 <2>
+           "hdr": { <1>
+               "number_of_significant_value_digits" : 3 <2>
+           }
        }
    }
}
}
--------------------------------------------------
-<1> The `method` parameter is set to `hdr` to indicate that HDR Histogram should be used to calculate the percentiles
+<1> The `hdr` object indicates that HDR Histogram should be used to calculate the percentiles, and specific settings for this algorithm can be specified inside the object
<2> `number_of_significant_value_digits` specifies the resolution of values for the histogram in number of significant digits

The HDRHistogram only supports positive values and will error if it is passed a negative value. It is also not a good idea to use
the HDRHistogram if the range of values is unknown as this could lead to high memory usage.

==== Missing value
@@ -115,11 +115,11 @@ TIP: for indexed scripts replace the `file` parameter with an `id` parameter.

experimental[]

https://github.com/HdrHistogram/HdrHistogram[HDR Histogram] (High Dynamic Range Histogram) is an alternative implementation
that can be useful when calculating percentile ranks for latency measurements as it can be faster than the t-digest implementation
with the trade-off of a larger memory footprint. This implementation maintains a fixed worst-case percentage error (specified as a
number of significant digits). This means that if data is recorded with values from 1 microsecond up to 1 hour (3,600,000,000
microseconds) in a histogram set to 3 significant digits, it will maintain a value resolution of 1 microsecond for values up to
1 millisecond and 3.6 seconds (or better) for the maximum tracked value (1 hour).

The HDR Histogram can be used by specifying the `method` parameter in the request:

@@ -132,17 +132,18 @@ The HDR Histogram can be used by specifying the `method` parameter in the request:
        "percentile_ranks" : {
            "field" : "load_time",
            "values" : [15, 30],
-           "method" : "hdr", <1>
-           "number_of_significant_value_digits" : 3 <2>
+           "hdr": { <1>
+               "number_of_significant_value_digits" : 3 <2>
+           }
        }
    }
}
}
--------------------------------------------------
-<1> The `method` parameter is set to `hdr` to indicate that HDR Histogram should be used to calculate the percentile_ranks
+<1> The `hdr` object indicates that HDR Histogram should be used to calculate the percentile ranks, and specific settings for this algorithm can be specified inside the object
<2> `number_of_significant_value_digits` specifies the resolution of values for the histogram in number of significant digits

The HDRHistogram only supports positive values and will error if it is passed a negative value. It is also not a good idea to use
the HDRHistogram if the range of values is unknown as this could lead to high memory usage.

==== Missing value

@@ -166,4 +167,3 @@ had a value.
--------------------------------------------------

<1> Documents without a value in the `grade` field will fall into the same bucket as documents that have the value `10`.
@@ -57,7 +57,7 @@ $ cat requests
{ "index" : { "_index" : "test", "_type" : "type1", "_id" : "1" } }
{ "field1" : "value1" }
$ curl -s -XPOST localhost:9200/_bulk --data-binary "@requests"; echo
-{"took":7,"items":[{"create":{"_index":"test","_type":"type1","_id":"1","_version":1}}]}
+{"took":7, "errors": false, "items":[{"index":{"_index":"test","_type":"type1","_id":"1","_version":1,"_operation":"create","forced_refresh":false}}]}
--------------------------------------------------

Because this format uses literal `\n`'s as delimiters, please be sure
@@ -25,7 +25,8 @@ The result of the above delete operation is:
    "_index" : "twitter",
    "_type" : "tweet",
    "_id" : "1",
-   "_version" : 2
+   "_version" : 2,
+   "_operation" : "delete"
}
--------------------------------------------------
@@ -31,6 +31,7 @@ The result of the above index operation is:
    "_id" : "1",
    "_version" : 1,
    "created" : true,
    "_operation" : "create",
    "forced_refresh": false
}
--------------------------------------------------

@@ -231,6 +232,7 @@ The result of the above index operation is:
    "_id" : "6a8ca01c-7896-48e9-81cc-9f70661fcb32",
    "_version" : 1,
    "created" : true,
    "_operation": "create",
    "forced_refresh": false
}
--------------------------------------------------
@@ -132,8 +132,20 @@ curl -XPOST 'localhost:9200/test/type1/1/_update' -d '{
}'
--------------------------------------------------

-If `name` was `new_name` before the request was sent then document is still
-reindexed.
+If `name` was `new_name` before the request was sent then the entire update
+request is ignored. The `operation` element in the response returns `noop` if
+the request was ignored.

[source,js]
--------------------------------------------------
{
   "_index": "test",
   "_type": "type1",
   "_id": "1",
   "_version": 1,
   "_operation": "noop"
}
--------------------------------------------------

[[upserts]]
[float]
@@ -153,18 +153,18 @@ sync-flushed:
--------------------------------------------------
{
   "_shards": {
-      "total": 10,
-      "successful": 10,
+      "total": 2,
+      "successful": 2,
      "failed": 0
   },
   "twitter": {
-      "total": 10,
-      "successful": 10,
+      "total": 2,
+      "successful": 2,
      "failed": 0
   }
}
--------------------------------------------------
-// TESTRESPONSE[s/"successful": 10/"successful": 5/]
+// TESTRESPONSE[s/"successful": 2/"successful": 1/]

Here is what it looks like when one shard group failed due to pending operations:
@@ -859,8 +859,16 @@ because it is likely that the number of elements in an array is unknown. For thi
processor exists. By specifying the field holding array elements and a processor that
defines what should happen to each element, array fields can easily be preprocessed.

-A processor inside the foreach processor works in a different context, and the only valid top-level
-field is `_value`, which holds the array element value. Under this field other fields may exist.
+A processor inside the foreach processor works in the array element context and puts that element in the ingest metadata
+under the `_ingest._value` key. If the array element is a JSON object, `_ingest._value` holds all immediate fields of that
+object; if the element is a scalar value, `_ingest._value` just holds that value. Note that if a processor prior to the
+`foreach` processor used the `_ingest._value` key then the specified value will not be available to the processor inside
+the `foreach` processor. The `foreach` processor does restore the original value, so that value is available to processors
+after the `foreach` processor.

Note that any other field from the document is accessible and modifiable like with all other processors. This processor
just puts the current array element being read into the `_ingest._value` ingest metadata attribute, so that it may be
pre-processed.

If the `foreach` processor fails to process an element inside the array, and no `on_failure` processor has been specified,
then it aborts the execution and leaves the array unmodified.
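
A minimal sketch of the behaviour described above, in plain Java rather than the real ingest classes (the map, list and processor below are illustrative stand-ins): each array element is exposed to the wrapped processor under the `_value` ingest metadata key, the processed value is collected, and any value that was stored under that key beforehand is restored once the loop finishes.

[source,java]
--------------------------------------------------
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.function.Consumer;

public class ForEachSketch {
    public static void main(String[] args) {
        // Stand-in for the ingest metadata map; a previous processor already used _value.
        Map<String, Object> ingestMetadata = new HashMap<>();
        ingestMetadata.put("_value", "set-by-an-earlier-processor");

        List<Object> values = new ArrayList<>(Arrays.asList("foo", "bar", "baz"));

        // Stand-in for the wrapped processor: uppercase whatever is under _ingest._value.
        Consumer<Map<String, Object>> wrappedProcessor =
            metadata -> metadata.put("_value", metadata.get("_value").toString().toUpperCase(Locale.ROOT));

        List<Object> newValues = new ArrayList<>(values.size());
        for (Object value : values) {
            Object previous = ingestMetadata.put("_value", value);        // expose the current element
            try {
                wrappedProcessor.accept(ingestMetadata);                   // run the wrapped processor
            } finally {
                newValues.add(ingestMetadata.put("_value", previous));     // collect result, restore previous value
            }
        }

        System.out.println(newValues);                    // [FOO, BAR, BAZ]
        System.out.println(ingestMetadata.get("_value")); // set-by-an-earlier-processor
    }
}
--------------------------------------------------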

@@ -892,7 +900,7 @@ When this `foreach` processor operates on this sample document:
      "field" : "values",
      "processor" : {
         "uppercase" : {
-           "field" : "_value"
+           "field" : "_ingest._value"
         }
      }
   }

@@ -936,7 +944,7 @@ so the following `foreach` processor is used:
      "field" : "persons",
      "processor" : {
         "remove" : {
-           "field" : "_value.id"
+           "field" : "_ingest._value.id"
         }
      }
   }

@@ -959,9 +967,7 @@ After preprocessing the result is:
}
--------------------------------------------------

-As for any processor, you can define `on_failure` processors
-in processors that are wrapped inside the `foreach` processor.

The wrapped processor can have an `on_failure` definition.
For example, the `id` field may not exist on all person objects.
Instead of failing the index request, you can use an `on_failure`
block to send the document to the 'failure_index' index for later inspection:
@@ -309,4 +309,9 @@ transportClient.addTransportAddress(
--------------------------------------------------

Also the helper methods in `QueryBuilders` class that create a `TemplateQueryBuilder` instance have been removed,
instead the constructors on `TemplateQueryBuilder` should be used.

==== Template query

The `template` query has been deprecated in favour of the search template api. The `template` query is scheduled
to be removed in the next major version.
@@ -64,7 +64,7 @@ Nodes can be excluded from becoming a master by setting `node.master` to `false`.

The `discovery.zen.minimum_master_nodes` sets the minimum
number of master eligible nodes that need to join a newly elected master in order for an election to
-complete and for the elected node to accept it's mastership. The same setting controls the minimum number of
+complete and for the elected node to accept its mastership. The same setting controls the minimum number of
active master eligible nodes that should be a part of any active cluster. If this requirement is not met the
active master node will step down and a new master election will begin.
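
For illustration (the helper class below is a made-up example, not Elasticsearch code): the usual recommendation for this setting is a strict majority of the master eligible nodes, i.e. `(master_eligible_nodes / 2) + 1`.

[source,java]
--------------------------------------------------
public class MinimumMasterNodes {
    // Strict majority of the master eligible nodes, the value conventionally
    // recommended for discovery.zen.minimum_master_nodes.
    static int minimumMasterNodes(int masterEligibleNodes) {
        return (masterEligibleNodes / 2) + 1;
    }

    public static void main(String[] args) {
        System.out.println(minimumMasterNodes(3)); // 2
        System.out.println(minimumMasterNodes(5)); // 3
    }
}
--------------------------------------------------
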
@ -10,15 +10,25 @@ The suggest request part is either defined alongside the query part in a
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -s -XPOST 'localhost:9200/_search' -d '{
|
||||
POST twitter/_search
|
||||
{
|
||||
"query" : {
|
||||
...
|
||||
"match": {
|
||||
"message": "tring out Elasticsearch"
|
||||
}
|
||||
},
|
||||
"suggest" : {
|
||||
...
|
||||
"my-suggestion" : {
|
||||
"text" : "trying out Elasticsearch",
|
||||
"term" : {
|
||||
"field" : "message"
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[setup:twitter]
|
||||
|
||||
Suggest requests executed against the `_suggest` endpoint should omit
|
||||
the surrounding `suggest` element which is only used if the suggest
|
||||
|
@ -26,15 +36,18 @@ request is part of a search.
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/_suggest' -d '{
|
||||
POST _suggest
|
||||
{
|
||||
"my-suggestion" : {
|
||||
"text" : "the amsterdma meetpu",
|
||||
"text" : "tring out Elasticsearch",
|
||||
"term" : {
|
||||
"field" : "body"
|
||||
"field" : "message"
|
||||
}
|
||||
}
|
||||
}'
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[setup:twitter]
|
||||
|
||||
Several suggestions can be specified per request. Each suggestion is
|
||||
identified with an arbitrary name. In the example below two suggestions
|
||||
|
@ -43,21 +56,24 @@ the `term` suggester, but have a different `text`.
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
"suggest" : {
|
||||
POST _suggest
|
||||
{
|
||||
"my-suggest-1" : {
|
||||
"text" : "the amsterdma meetpu",
|
||||
"text" : "tring out Elasticsearch",
|
||||
"term" : {
|
||||
"field" : "body"
|
||||
"field" : "message"
|
||||
}
|
||||
},
|
||||
"my-suggest-2" : {
|
||||
"text" : "the rottredam meetpu",
|
||||
"text" : "kmichy",
|
||||
"term" : {
|
||||
"field" : "title"
|
||||
"field" : "user"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[setup:twitter]
|
||||
|
||||
The below suggest response example includes the suggestion response for
|
||||
`my-suggest-1` and `my-suggest-2`. Each suggestion part contains
|
||||
|
@ -68,44 +84,35 @@ in the suggest text and if found an arbitrary number of options.
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...
|
||||
"suggest": {
|
||||
"my-suggest-1": [
|
||||
{
|
||||
"text" : "amsterdma",
|
||||
"offset": 4,
|
||||
"length": 9,
|
||||
"options": [
|
||||
...
|
||||
]
|
||||
},
|
||||
...
|
||||
],
|
||||
"my-suggest-2" : [
|
||||
...
|
||||
]
|
||||
}
|
||||
...
|
||||
"_shards": ...
|
||||
"my-suggest-1": [ {
|
||||
"text": "tring",
|
||||
"offset": 0,
|
||||
"length": 5,
|
||||
"options": [ {"text": "trying", "score": 0.8, "freq": 1 } ]
|
||||
}, {
|
||||
"text": "out",
|
||||
"offset": 6,
|
||||
"length": 3,
|
||||
"options": []
|
||||
}, {
|
||||
"text": "elasticsearch",
|
||||
"offset": 10,
|
||||
"length": 13,
|
||||
"options": []
|
||||
} ],
|
||||
"my-suggest-2": ...
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"_shards": \.\.\./"_shards": "$body._shards",/]
|
||||
// TESTRESPONSE[s/"my-suggest-2": \.\.\./"my-suggest-2": "$body.my-suggest-2"/]
|
||||
|
||||
|
||||
Each options array contains an option object that includes the
|
||||
suggested text, its document frequency and score compared to the suggest
|
||||
entry text. The meaning of the score depends on the used suggester. The
|
||||
term suggester's score is based on the edit distance.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
"options": [
|
||||
{
|
||||
"text": "amsterdam",
|
||||
"freq": 77,
|
||||
"score": 0.8888889
|
||||
},
|
||||
...
|
||||
]
|
||||
--------------------------------------------------
|
||||
|
||||
[float]
|
||||
[[global-suggest]]
|
||||
=== Global suggest text
|
||||
|
@ -116,157 +123,27 @@ and applies to the `my-suggest-1` and `my-suggest-2` suggestions.
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
"suggest" : {
|
||||
"text" : "the amsterdma meetpu",
|
||||
POST _suggest
|
||||
{
|
||||
"text" : "tring out Elasticsearch",
|
||||
"my-suggest-1" : {
|
||||
"term" : {
|
||||
"field" : "title"
|
||||
"field" : "message"
|
||||
}
|
||||
},
|
||||
"my-suggest-2" : {
|
||||
"term" : {
|
||||
"field" : "body"
|
||||
"field" : "user"
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
The suggest text can in the above example also be specified as
|
||||
suggestion specific option. The suggest text specified on suggestion
|
||||
level override the suggest text on the global level.
|
||||
|
||||
[float]
|
||||
=== Other suggest example
|
||||
|
||||
In the below example we request suggestions for the following suggest
|
||||
text: `devloping distibutd saerch engies` on the `title` field with a
|
||||
maximum of 3 suggestions per term inside the suggest text. Note that in
|
||||
this example we set `size` to `0`. This isn't required, but a
|
||||
nice optimization. The suggestions are gathered in the `query` phase and
|
||||
in the case that we only care about suggestions (so no hits) we don't
|
||||
need to execute the `fetch` phase.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -s -XPOST 'localhost:9200/_search' -d '{
|
||||
"size": 0,
|
||||
"suggest" : {
|
||||
"my-title-suggestions-1" : {
|
||||
"text" : "devloping distibutd saerch engies",
|
||||
"term" : {
|
||||
"size" : 3,
|
||||
"field" : "title"
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
--------------------------------------------------
|
||||
|
||||
The above request could yield the response as stated in the code example
|
||||
below. As you can see if we take the first suggested options of each
|
||||
suggestion entry we get `developing distributed search engines` as
|
||||
result.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
...
|
||||
"suggest": {
|
||||
"my-title-suggestions-1": [
|
||||
{
|
||||
"text": "devloping",
|
||||
"offset": 0,
|
||||
"length": 9,
|
||||
"options": [
|
||||
{
|
||||
"text": "developing",
|
||||
"freq": 77,
|
||||
"score": 0.8888889
|
||||
},
|
||||
{
|
||||
"text": "deloping",
|
||||
"freq": 1,
|
||||
"score": 0.875
|
||||
},
|
||||
{
|
||||
"text": "deploying",
|
||||
"freq": 2,
|
||||
"score": 0.7777778
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"text": "distibutd",
|
||||
"offset": 10,
|
||||
"length": 9,
|
||||
"options": [
|
||||
{
|
||||
"text": "distributed",
|
||||
"freq": 217,
|
||||
"score": 0.7777778
|
||||
},
|
||||
{
|
||||
"text": "disributed",
|
||||
"freq": 1,
|
||||
"score": 0.7777778
|
||||
},
|
||||
{
|
||||
"text": "distribute",
|
||||
"freq": 1,
|
||||
"score": 0.7777778
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"text": "saerch",
|
||||
"offset": 20,
|
||||
"length": 6,
|
||||
"options": [
|
||||
{
|
||||
"text": "search",
|
||||
"freq": 1038,
|
||||
"score": 0.8333333
|
||||
},
|
||||
{
|
||||
"text": "smerch",
|
||||
"freq": 3,
|
||||
"score": 0.8333333
|
||||
},
|
||||
{
|
||||
"text": "serch",
|
||||
"freq": 2,
|
||||
"score": 0.8
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"text": "engies",
|
||||
"offset": 27,
|
||||
"length": 6,
|
||||
"options": [
|
||||
{
|
||||
"text": "engines",
|
||||
"freq": 568,
|
||||
"score": 0.8333333
|
||||
},
|
||||
{
|
||||
"text": "engles",
|
||||
"freq": 3,
|
||||
"score": 0.8333333
|
||||
},
|
||||
{
|
||||
"text": "eggies",
|
||||
"freq": 1,
|
||||
"score": 0.8333333
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
...
|
||||
}
|
||||
--------------------------------------------------
|
||||
|
||||
include::suggesters/term-suggest.asciidoc[]
|
||||
|
||||
include::suggesters/phrase-suggest.asciidoc[]
|
||||
|
@ -274,5 +151,3 @@ include::suggesters/phrase-suggest.asciidoc[]
|
|||
include::suggesters/completion-suggest.asciidoc[]
|
||||
|
||||
include::suggesters/context-suggest.asciidoc[]
|
||||
|
||||
|
||||
|
|
|
@ -17,36 +17,94 @@ co-occurrence and frequencies.
|
|||
|
||||
==== API Example
|
||||
|
||||
The `phrase` request is defined along side the query part in the json
|
||||
request:
|
||||
In general the `phrase` suggester requires special mapping up front to work.
|
||||
The `phrase` suggester examples on this page need the following mapping to
|
||||
work. The `reverse` analyzer is used only in the last example.
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/_search' -d '{
|
||||
"suggest" : {
|
||||
"text" : "Xor the Got-Jewel",
|
||||
"simple_phrase" : {
|
||||
"phrase" : {
|
||||
"analyzer" : "body",
|
||||
"field" : "bigram",
|
||||
"size" : 1,
|
||||
"real_word_error_likelihood" : 0.95,
|
||||
"max_errors" : 0.5,
|
||||
"gram_size" : 2,
|
||||
"direct_generator" : [ {
|
||||
"field" : "body",
|
||||
"suggest_mode" : "always",
|
||||
"min_word_length" : 1
|
||||
} ],
|
||||
"highlight": {
|
||||
"pre_tag": "<em>",
|
||||
"post_tag": "</em>"
|
||||
POST test
|
||||
{
|
||||
"settings": {
|
||||
"index": {
|
||||
"number_of_shards": 1,
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"trigram": {
|
||||
"type": "custom",
|
||||
"tokenizer": "standard",
|
||||
"filter": ["standard", "shingle"]
|
||||
},
|
||||
"reverse": {
|
||||
"type": "custom",
|
||||
"tokenizer": "standard",
|
||||
"filter": ["standard", "reverse"]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"shingle": {
|
||||
"type": "shingle",
|
||||
"min_shingle_size": 2,
|
||||
"max_shingle_size": 3
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"test": {
|
||||
"properties": {
|
||||
"title": {
|
||||
"type": "text",
|
||||
"fields": {
|
||||
"trigram": {
|
||||
"type": "text",
|
||||
"analyzer": "trigram"
|
||||
},
|
||||
"reverse": {
|
||||
"type": "text",
|
||||
"analyzer": "reverse"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}'
|
||||
}
|
||||
POST test/test
|
||||
{"title": "noble warriors"}
|
||||
POST test/test
|
||||
{"title": "nobel prize"}
|
||||
POST _refresh
|
||||
--------------------------------------------------
|
||||
// TESTSETUP
|
||||
|
||||
Once you have the analyzers and mappings set up you can use the `phrase`
|
||||
suggester in the same spot you'd use the `term` suggester:
|
||||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _suggest?pretty -d'
|
||||
{
|
||||
"text": "noble prize",
|
||||
"simple_phrase": {
|
||||
"phrase": {
|
||||
"field": "title.trigram",
|
||||
"size": 1,
|
||||
"gram_size": 3,
|
||||
"direct_generator": [ {
|
||||
"field": "title.trigram",
|
||||
"suggest_mode": "always"
|
||||
} ],
|
||||
"highlight": {
|
||||
"pre_tag": "<em>",
|
||||
"post_tag": "</em>"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
The response contains suggestions scored by the most likely spell
|
||||
correction first. In this case we received the expected correction
|
||||
|
@ -57,37 +115,23 @@ can contain misspellings (See parameter descriptions below).
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
{
|
||||
"took" : 5,
|
||||
"timed_out" : false,
|
||||
"_shards" : {
|
||||
"total" : 5,
|
||||
"successful" : 5,
|
||||
"failed" : 0
|
||||
},
|
||||
"hits" : {
|
||||
"total" : 2938,
|
||||
"max_score" : 0.0,
|
||||
"hits" : [ ]
|
||||
},
|
||||
"suggest" : {
|
||||
"simple_phrase" : [ {
|
||||
"text" : "Xor the Got-Jewel",
|
||||
{
|
||||
"_shards": ...
|
||||
"simple_phrase" : [
|
||||
{
|
||||
"text" : "noble prize",
|
||||
"offset" : 0,
|
||||
"length" : 17,
|
||||
"length" : 11,
|
||||
"options" : [ {
|
||||
"text" : "xorr the god jewel",
|
||||
"highlighted": "<em>xorr</em> the <em>god</em> jewel",
|
||||
"score" : 0.17877324
|
||||
}, {
|
||||
"text" : "xor the god jewel",
|
||||
"highlighted": "xor the <em>god</em> jewel",
|
||||
"score" : 0.14231323
|
||||
} ]
|
||||
} ]
|
||||
}
|
||||
"text" : "nobel prize",
|
||||
"highlighted": "<em>nobel</em> prize",
|
||||
"score" : 0.40765354
|
||||
}]
|
||||
}
|
||||
]
|
||||
}
|
||||
--------------------------------------------------
|
||||
// TESTRESPONSE[s/"_shards": .../"_shards": "$body._shards",/]
|
||||
|
||||
==== Basic Phrase suggest API parameters
|
||||
|
||||
|
@ -178,34 +222,34 @@ can contain misspellings (See parameter descriptions below).
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -XPOST 'localhost:9200/_search' -d {
|
||||
"suggest" : {
|
||||
"text" : "Xor the Got-Jewel",
|
||||
"simple_phrase" : {
|
||||
"phrase" : {
|
||||
"field" : "bigram",
|
||||
"size" : 1,
|
||||
"direct_generator" : [ {
|
||||
"field" : "body",
|
||||
"suggest_mode" : "always",
|
||||
"min_word_length" : 1
|
||||
} ],
|
||||
"collate": {
|
||||
"query": { <1>
|
||||
"inline" : {
|
||||
"match": {
|
||||
"{{field_name}}" : "{{suggestion}}" <2>
|
||||
}
|
||||
}
|
||||
},
|
||||
"params": {"field_name" : "title"}, <3>
|
||||
"prune": true <4>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
POST _suggest
|
||||
{
|
||||
"text" : "noble prize",
|
||||
"simple_phrase" : {
|
||||
"phrase" : {
|
||||
"field" : "title.trigram",
|
||||
"size" : 1,
|
||||
"direct_generator" : [ {
|
||||
"field" : "title.trigram",
|
||||
"suggest_mode" : "always",
|
||||
"min_word_length" : 1
|
||||
} ],
|
||||
"collate": {
|
||||
"query": { <1>
|
||||
"inline" : {
|
||||
"match": {
|
||||
"{{field_name}}" : "{{suggestion}}" <2>
|
||||
}
|
||||
}
|
||||
},
|
||||
"params": {"field_name" : "title"}, <3>
|
||||
"prune": true <4>
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
<1> This query will be run once for every suggestion.
|
||||
<2> The `{{suggestion}}` variable will be replaced by the text
|
||||
of each suggestion.
|
||||
|
@ -342,33 +386,27 @@ accept ordinary analyzer names.
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
curl -s -XPOST 'localhost:9200/_search' -d {
|
||||
"suggest" : {
|
||||
"text" : "Xor the Got-Jewel",
|
||||
"simple_phrase" : {
|
||||
"phrase" : {
|
||||
"analyzer" : "body",
|
||||
"field" : "bigram",
|
||||
"size" : 4,
|
||||
"real_word_error_likelihood" : 0.95,
|
||||
"confidence" : 2.0,
|
||||
"gram_size" : 2,
|
||||
"direct_generator" : [ {
|
||||
"field" : "body",
|
||||
"suggest_mode" : "always",
|
||||
"min_word_length" : 1
|
||||
}, {
|
||||
"field" : "reverse",
|
||||
"suggest_mode" : "always",
|
||||
"min_word_length" : 1,
|
||||
"pre_filter" : "reverse",
|
||||
"post_filter" : "reverse"
|
||||
} ]
|
||||
}
|
||||
POST _suggest
|
||||
{
|
||||
"text" : "obel prize",
|
||||
"simple_phrase" : {
|
||||
"phrase" : {
|
||||
"field" : "title.trigram",
|
||||
"size" : 1,
|
||||
"direct_generator" : [ {
|
||||
"field" : "title.trigram",
|
||||
"suggest_mode" : "always"
|
||||
}, {
|
||||
"field" : "title.reverse",
|
||||
"suggest_mode" : "always",
|
||||
"pre_filter" : "reverse",
|
||||
"post_filter" : "reverse"
|
||||
} ]
|
||||
}
|
||||
}
|
||||
}
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
|
||||
`pre_filter` and `post_filter` can also be used to inject synonyms after
|
||||
candidates are generated. For instance for the query `captain usq` we
|
||||
|
|
|
@@ -21,19 +21,19 @@ And here is a sample response:
    "timed_out": false,
    "took": 62,
    "_shards":{
-       "total" : 5,
-       "successful" : 5,
+       "total" : 1,
+       "successful" : 1,
        "failed" : 0
    },
    "hits":{
        "total" : 1,
-       "max_score": 0.2876821,
+       "max_score": 1.3862944,
        "hits" : [
            {
                "_index" : "twitter",
                "_type" : "tweet",
                "_id" : "0",
-               "_score": 0.2876821,
+               "_score": 1.3862944,
                "_source" : {
                    "user" : "kimchy",
                    "date" : "2009-11-15T14:12:12",
@@ -22,21 +21,21 @@ package org.elasticsearch.smoketest;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

-import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
-import org.elasticsearch.test.rest.RestTestCandidate;
-import org.elasticsearch.test.rest.parser.RestTestParseException;
+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;
import java.util.List;

-public class SmokeTestDocsIT extends ESClientYamlSuiteTestCase {
+public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

-    public SmokeTestDocsIT(@Name("yaml") RestTestCandidate testCandidate) {
+    public DocsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
-    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
+    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
@@ -20,22 +20,20 @@ package org.elasticsearch.search.aggregations.matrix;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
-import org.elasticsearch.test.rest.RestTestCandidate;
-import org.elasticsearch.test.rest.parser.RestTestParseException;

+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

/**
 *
 */
-public class MatrixAggregationRestIT extends ESClientYamlSuiteTestCase {
-    public MatrixAggregationRestIT(@Name("yaml")RestTestCandidate testCandidate) {
+public class MatrixStatsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+    public MatrixStatsClientYamlTestSuiteIT(@Name("yaml")ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
-    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
+    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -62,11 +62,12 @@ public final class ForEachProcessor extends AbstractProcessor {
        List<Object> values = ingestDocument.getFieldValue(field, List.class);
        List<Object> newValues = new ArrayList<>(values.size());
        for (Object value : values) {
-           Map<String, Object> innerSource = new HashMap<>(ingestDocument.getSourceAndMetadata());
-           innerSource.put("_value", value); // scalar value to access the list item being evaluated
-           IngestDocument innerIngestDocument = new IngestDocument(innerSource, ingestDocument.getIngestMetadata());
-           processor.execute(innerIngestDocument);
-           newValues.add(innerSource.get("_value"));
+           Object previousValue = ingestDocument.getIngestMetadata().put("_value", value);
+           try {
+               processor.execute(ingestDocument);
+           } finally {
+               newValues.add(ingestDocument.getIngestMetadata().put("_value", previousValue));
+           }
        }
        ingestDocument.setFieldValue(field, newValues);
    }
@ -49,7 +49,7 @@ public class ForEachProcessorTests extends ESTestCase {
|
|||
);
|
||||
|
||||
ForEachProcessor processor = new ForEachProcessor(
|
||||
"_tag", "values", new UppercaseProcessor("_tag", "_value")
|
||||
"_tag", "values", new UppercaseProcessor("_tag", "_ingest._value")
|
||||
);
|
||||
processor.execute(ingestDocument);
|
||||
|
||||
|
@ -65,7 +65,7 @@ public class ForEachProcessorTests extends ESTestCase {
|
|||
);
|
||||
|
||||
TestProcessor testProcessor = new TestProcessor(id -> {
|
||||
if ("c".equals(id.getFieldValue("_value", String.class))) {
|
||||
if ("c".equals(id.getFieldValue("_ingest._value", String.class))) {
|
||||
throw new RuntimeException("failure");
|
||||
}
|
||||
});
|
||||
|
@ -80,11 +80,11 @@ public class ForEachProcessorTests extends ESTestCase {
|
|||
assertThat(ingestDocument.getFieldValue("values", List.class), equalTo(Arrays.asList("a", "b", "c")));
|
||||
|
||||
testProcessor = new TestProcessor(id -> {
|
||||
String value = id.getFieldValue("_value", String.class);
|
||||
String value = id.getFieldValue("_ingest._value", String.class);
|
||||
if ("c".equals(value)) {
|
||||
throw new RuntimeException("failure");
|
||||
} else {
|
||||
id.setFieldValue("_value", value.toUpperCase(Locale.ROOT));
|
||||
id.setFieldValue("_ingest._value", value.toUpperCase(Locale.ROOT));
|
||||
}
|
||||
});
|
||||
Processor onFailureProcessor = new TestProcessor(ingestDocument1 -> {});
|
||||
|
@ -105,9 +105,9 @@ public class ForEachProcessorTests extends ESTestCase {
|
|||
);
|
||||
|
||||
TestProcessor innerProcessor = new TestProcessor(id -> {
|
||||
id.setFieldValue("_value.index", id.getSourceAndMetadata().get("_index"));
|
||||
id.setFieldValue("_value.type", id.getSourceAndMetadata().get("_type"));
|
||||
id.setFieldValue("_value.id", id.getSourceAndMetadata().get("_id"));
|
||||
id.setFieldValue("_ingest._value.index", id.getSourceAndMetadata().get("_index"));
|
||||
id.setFieldValue("_ingest._value.type", id.getSourceAndMetadata().get("_type"));
|
||||
id.setFieldValue("_ingest._value.id", id.getSourceAndMetadata().get("_id"));
|
||||
});
|
||||
ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor);
|
||||
processor.execute(ingestDocument);
|
||||
|
@ -136,7 +136,7 @@ public class ForEachProcessorTests extends ESTestCase {
|
|||
|
||||
TemplateService ts = TestTemplateService.instance();
|
||||
ForEachProcessor processor = new ForEachProcessor(
|
||||
"_tag", "values", new SetProcessor("_tag", ts.compile("_value.new_field"), (model) -> model.get("other"))
|
||||
"_tag", "values", new SetProcessor("_tag", ts.compile("_ingest._value.new_field"), (model) -> model.get("other"))
|
||||
);
|
||||
processor.execute(ingestDocument);
|
||||
|
||||
|
@ -151,8 +151,8 @@ public class ForEachProcessorTests extends ESTestCase {
|
|||
Processor innerProcessor = new Processor() {
|
||||
@Override
|
||||
public void execute(IngestDocument ingestDocument) throws Exception {
|
||||
String existingValue = ingestDocument.getFieldValue("_value", String.class);
|
||||
ingestDocument.setFieldValue("_value", existingValue + ".");
|
||||
String existingValue = ingestDocument.getFieldValue("_ingest._value", String.class);
|
||||
ingestDocument.setFieldValue("_ingest._value", existingValue + ".");
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -184,4 +184,91 @@ public class ForEachProcessorTests extends ESTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
public void testModifyFieldsOutsideArray() throws Exception {
|
||||
List<Object> values = new ArrayList<>();
|
||||
values.add("string");
|
||||
values.add(1);
|
||||
values.add(null);
|
||||
IngestDocument ingestDocument = new IngestDocument(
|
||||
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values)
|
||||
);
|
||||
|
||||
TemplateService ts = TestTemplateService.instance();
|
||||
|
||||
ForEachProcessor processor = new ForEachProcessor(
|
||||
"_tag", "values", new CompoundProcessor(false,
|
||||
Collections.singletonList(new UppercaseProcessor("_tag_upper", "_ingest._value")),
|
||||
Collections.singletonList(new AppendProcessor("_tag",
|
||||
ts.compile("errors"), (model) -> (Collections.singletonList("added"))))
|
||||
));
|
||||
processor.execute(ingestDocument);
|
||||
|
||||
List<String> result = ingestDocument.getFieldValue("values", List.class);
|
||||
assertThat(result.get(0), equalTo("STRING"));
|
||||
assertThat(result.get(1), equalTo(1));
|
||||
assertThat(result.get(2), equalTo(null));
|
||||
|
||||
List<String> errors = ingestDocument.getFieldValue("errors", List.class);
|
||||
assertThat(errors.size(), equalTo(2));
|
||||
}
|
||||
|
||||
public void testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws Exception {
|
||||
List<Object> values = new ArrayList<>();
|
||||
values.add("please");
|
||||
values.add("change");
|
||||
values.add("me");
|
||||
Map<String, Object> source = new HashMap<>();
|
||||
source.put("_value", "new_value");
|
||||
source.put("values", values);
|
||||
IngestDocument ingestDocument = new IngestDocument(
|
||||
"_index", "_type", "_id", null, null, null, null, source
|
||||
);
|
||||
|
||||
TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value",
|
||||
doc.getFieldValue("_source._value", String.class)));
|
||||
ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor);
|
||||
forEachProcessor.execute(ingestDocument);
|
||||
|
||||
List<String> result = ingestDocument.getFieldValue("values", List.class);
|
||||
assertThat(result.get(0), equalTo("new_value"));
|
||||
assertThat(result.get(1), equalTo("new_value"));
|
||||
assertThat(result.get(2), equalTo("new_value"));
|
||||
}
|
||||
|
||||
public void testNestedForEach() throws Exception {
|
||||
List<Map<String, Object>> values = new ArrayList<>();
|
||||
List<Object> innerValues = new ArrayList<>();
|
||||
innerValues.add("abc");
|
||||
innerValues.add("def");
|
||||
Map<String, Object> value = new HashMap<>();
|
||||
value.put("values2", innerValues);
|
||||
values.add(value);
|
||||
|
||||
innerValues = new ArrayList<>();
|
||||
innerValues.add("ghi");
|
||||
innerValues.add("jkl");
|
||||
value = new HashMap<>();
|
||||
value.put("values2", innerValues);
|
||||
values.add(value);
|
||||
|
||||
IngestDocument ingestDocument = new IngestDocument(
|
||||
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values1", values)
|
||||
);
|
||||
|
||||
TestProcessor testProcessor = new TestProcessor(
|
||||
doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH))
|
||||
);
|
||||
ForEachProcessor processor = new ForEachProcessor(
|
||||
"_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor));
|
||||
processor.execute(ingestDocument);
|
||||
|
||||
List<String> result = ingestDocument.getFieldValue("values1.0.values2", List.class);
|
||||
assertThat(result.get(0), equalTo("ABC"));
|
||||
assertThat(result.get(1), equalTo("DEF"));
|
||||
|
||||
result = ingestDocument.getFieldValue("values1.1.values2", List.class);
|
||||
assertThat(result.get(0), equalTo("GHI"));
|
||||
assertThat(result.get(1), equalTo("JKL"));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -21,20 +21,21 @@ package org.elasticsearch.ingest.common;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
-import org.elasticsearch.test.rest.RestTestCandidate;
-import org.elasticsearch.test.rest.parser.RestTestParseException;

+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

-public class IngestCommonRestIT extends ESClientYamlSuiteTestCase {
+public class IngestCommonClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

-    public IngestCommonRestIT(@Name("yaml") RestTestCandidate testCandidate) {
+    public IngestCommonClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
-    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
+    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@ -1,3 +1,10 @@
|
|||
---
|
||||
teardown:
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "1"
|
||||
ignore: 404
|
||||
|
||||
---
|
||||
"Test date index name processor with defaults":
|
||||
- do:
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
---
|
||||
teardown:
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "my_pipeline"
|
||||
ignore: 404
|
||||
|
||||
---
|
||||
"Test sort Processor":
|
||||
- do:
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
---
|
||||
teardown:
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "my_pipeline"
|
||||
ignore: 404
|
||||
|
||||
---
|
||||
"Test Grok Pipeline":
|
||||
- do:
|
||||
|
|
|
@ -1,3 +1,18 @@
|
|||
---
|
||||
teardown:
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "my_pipeline"
|
||||
ignore: 404
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "first_pipeline"
|
||||
ignore: 404
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "second_pipeline"
|
||||
ignore: 404
|
||||
|
||||
---
|
||||
"Test basic pipeline crud":
|
||||
- do:
|
||||
|
@ -23,6 +38,11 @@
|
|||
- match: { pipelines.0.id: "my_pipeline" }
|
||||
- match: { pipelines.0.config.description: "_description" }
|
||||
|
||||
- do:
|
||||
ingest.get_pipeline: {}
|
||||
- match: { pipelines.0.id: "my_pipeline" }
|
||||
- match: { pipelines.0.config.description: "_description" }
|
||||
|
||||
- do:
|
||||
ingest.delete_pipeline:
|
||||
id: "my_pipeline"
|
||||
|
@@ -33,6 +53,38 @@
      ingest.get_pipeline:
        id: "my_pipeline"

---
"Test Get All Pipelines (unordered)":
  - do:
      ingest.put_pipeline:
        id: "first_pipeline"
        body: >
          {
            "description": "first",
            "processors": [
              {
                "set" : {
                  "field" : "field1",
                  "value": "_value"
                }
              }
            ]
          }
  - do:
      ingest.put_pipeline:
        id: "second_pipeline"
        body: >
          {
            "description": "second",
            "processors": []
          }

  # Order is not guaranteed by the response, so we check for length instead; above tests that we get appropriate values
  - do:
      ingest.get_pipeline: {}
  - length: { pipelines: 2 }

---
"Test invalid config":
  - do:
@@ -1,3 +1,10 @@
---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test date processor":
  - do:
@@ -1,3 +1,10 @@
---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test mutate processors":
  - do:
@@ -1,3 +1,10 @@
---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test Pipeline With On Failure Block":
  - do:
@@ -1,3 +1,10 @@
---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test Fail Processor":
  - do:
@@ -31,6 +31,17 @@ setup:
            ]
          }

---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "pipeline1"
        ignore: 404
  - do:
      ingest.delete_pipeline:
        id: "pipeline2"
        ignore: 404

---
"Test bulk request without default pipeline":
@@ -1,3 +1,10 @@
---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test foreach Processor":
  - do:

@@ -12,7 +19,7 @@
              "field" : "values",
              "processor" : {
                "uppercase" : {
                  "field" : "_value"
                  "field" : "_ingest._value"
                }
              }
            }
@@ -1,3 +1,10 @@
---
teardown:
  - do:
      ingest.delete_pipeline:
        id: "my_pipeline"
        ignore: 404

---
"Test simulate with stored ingest pipeline":
  - do:
@@ -21,20 +21,21 @@ package org.elasticsearch.script.expression;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class ExpressionRestIT extends ESClientYamlSuiteTestCase {
public class LangExpressionClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public ExpressionRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public LangExpressionClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.script.groovy;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class GroovyRestIT extends ESClientYamlSuiteTestCase {
public class LangGroovyClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public GroovyRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public LangGroovyClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -23,6 +23,8 @@ import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;

@@ -47,22 +49,26 @@ import java.util.Optional;
/**
 * Facilitates creating template query requests.
 * */
@Deprecated
// TODO remove this class in 6.0
public class TemplateQueryBuilder extends AbstractQueryBuilder<TemplateQueryBuilder> {

    public static final String NAME = "template";
    private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(TemplateQueryBuilder.class));

    /** Template to fill. */
    private final Script template;

    public TemplateQueryBuilder(String template, ScriptService.ScriptType scriptType, Map<String, Object> params) {
        this.template = new Script(template, scriptType, "mustache", params);
        this(new Script(template, scriptType, "mustache", params));
    }

    public TemplateQueryBuilder(String template, ScriptService.ScriptType scriptType, Map<String, Object> params, XContentType ct) {
        this.template = new Script(template, scriptType, "mustache", params, ct);
        this(new Script(template, scriptType, "mustache", params, ct));
    }

    // for tests, so that mock script can be used:
    TemplateQueryBuilder(Script template) {
        DEPRECATION_LOGGER.deprecated("[{}] query is deprecated, use search template api instead", NAME);
        if (template == null) {
            throw new IllegalArgumentException("query template cannot be null");
        }
@@ -21,20 +21,21 @@ package org.elasticsearch.script.mustache;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class MustacheRestIT extends ESClientYamlSuiteTestCase {
public class LangMustacheClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public MustacheRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public LangMustacheClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,21 +21,22 @@ package org.elasticsearch.painless;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

/** Runs yaml rest tests */
public class PainlessRestIT extends ESClientYamlSuiteTestCase {
public class LangPainlessClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public PainlessRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public LangPainlessClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,19 +21,20 @@ package org.elasticsearch.percolator;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class PercolatorRestIT extends ESClientYamlSuiteTestCase {
    public PercolatorRestIT(@Name("yaml") RestTestCandidate testCandidate) {
public class PercolatorClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
    public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -314,7 +314,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
        };
        ScrollableHitSource.Response response = new ScrollableHitSource.Response(false, emptyList(), 0, emptyList(), null);
        simulateScrollResponse(new DummyAbstractAsyncBulkByScrollAction(), timeValueNanos(System.nanoTime()), 10, response);
        ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get());
        ExecutionException e = expectThrows(ExecutionException.class, () -> listener.get());
        assertThat(e.getMessage(), equalTo("EsRejectedExecutionException[test]"));
        assertThat(client.scrollsCleared, contains(scrollId));

@@ -773,7 +773,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
            UpdateRequest update = (UpdateRequest) item;
            opType = "update";
            response = new UpdateResponse(shardId, update.type(), update.id(),
                    randomIntBetween(0, Integer.MAX_VALUE), true);
                    randomIntBetween(0, Integer.MAX_VALUE), DocWriteResponse.Operation.CREATE);
        } else if (item instanceof DeleteRequest) {
            DeleteRequest delete = (DeleteRequest) item;
            opType = "delete";
@@ -22,19 +22,19 @@ package org.elasticsearch.index.reindex;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class ReindexRestIT extends ESClientYamlSuiteTestCase {
    public ReindexRestIT(@Name("yaml") RestTestCandidate testCandidate) {
public class ReindexClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
    public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -22,20 +22,20 @@ package org.elasticsearch.http.netty3;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class Netty3RestIT extends ESClientYamlSuiteTestCase {
public class Netty3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public Netty3RestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public Netty3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,25 +21,25 @@ package org.elasticsearch.http.netty4;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite;

import org.apache.lucene.util.TimeUnits;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

//TODO: This is a *temporary* workaround to ensure a timeout does not mask other problems
@TimeoutSuite(millis = 30 * TimeUnits.MINUTE)
public class Netty4RestIT extends ESClientYamlSuiteTestCase {
public class Netty4ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public Netty4RestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public Netty4ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
@@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class AnalysisICURestIT extends ESClientYamlSuiteTestCase {
public class IcuClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public AnalysisICURestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public IcuClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class AnalysisSmartChineseRestIT extends ESClientYamlSuiteTestCase {
public class KuromojiClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public AnalysisSmartChineseRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public KuromojiClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -0,0 +1,42 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.analysis;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class PhoneticClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public PhoneticClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class AnalysisKuromojiRestIT extends ESClientYamlSuiteTestCase {
public class SmartCNClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public AnalysisKuromojiRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public SmartCNClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -1,41 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.analysis;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import java.io.IOException;

public class AnalysisPolishRestIT extends ESClientYamlSuiteTestCase {

    public AnalysisPolishRestIT(@Name("yaml") RestTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.index.analysis;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class AnalysisPhoneticRestIT extends ESClientYamlSuiteTestCase {
public class StempelClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public AnalysisPhoneticRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public StempelClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.discovery.azure.classic;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class AzureDiscoveryRestIT extends ESClientYamlSuiteTestCase {
public class DiscoveryAzureClassicClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public AzureDiscoveryRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public DiscoveryAzureClassicClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.cloud.aws;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class DiscoveryEc2RestIT extends ESClientYamlSuiteTestCase {
public class CloudAwsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public DiscoveryEc2RestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public CloudAwsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.discovery.gce;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class DiscoveryGCERestIT extends ESClientYamlSuiteTestCase {
public class DiscoveryGceClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public DiscoveryGCERestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public DiscoveryGceClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}
@@ -21,20 +21,21 @@ package org.elasticsearch.ingest.attachment;

import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.RestTestCandidate;
import org.elasticsearch.test.rest.parser.RestTestParseException;

import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
import org.elasticsearch.test.rest.yaml.parser.ClientYamlTestParseException;

import java.io.IOException;

public class IngestAttachmentRestIT extends ESClientYamlSuiteTestCase {
public class IngestAttachmentClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {

    public IngestAttachmentRestIT(@Name("yaml") RestTestCandidate testCandidate) {
    public IngestAttachmentClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
        super(testCandidate);
    }

    @ParametersFactory
    public static Iterable<Object[]> parameters() throws IOException, RestTestParseException {
    public static Iterable<Object[]> parameters() throws IOException, ClientYamlTestParseException {
        return ESClientYamlSuiteTestCase.createParameters(0, 1);
    }
}