commit 6ea396a476
Merge remote-tracking branch 'origin/master' into index-lifecycle
@@ -402,6 +402,7 @@ These are the linux flavors supported, all of which we provide images for

 * ubuntu-1404 aka trusty
 * ubuntu-1604 aka xenial
+* ubuntu-1804 aka bionic beaver
 * debian-8 aka jessie
 * debian-9 aka stretch, the current debian stable distribution
 * centos-6
Vagrantfile (vendored, 9 lines changed)
@@ -61,6 +61,15 @@ Vagrant.configure(2) do |config|
       SHELL
     end
   end
+  'ubuntu-1804'.tap do |box|
+    config.vm.define box, define_opts do |config|
+      config.vm.box = 'elastic/ubuntu-18.04-x86_64'
+      deb_common config, box, extra: <<-SHELL
+        # Install Jayatana so we can work around it being present.
+        [ -f /usr/share/java/jayatanaag.jar ] || install jayatana
+      SHELL
+    end
+  end
   # Wheezy's backports don't contain Openjdk 8 and the backflips
   # required to get the sun jdk on there just aren't worth it. We have
   # jessie and stretch for testing debian and it works fine.
@@ -31,7 +31,8 @@ class VagrantTestPlugin implements Plugin<Project> {
             'opensuse-42',
             'sles-12',
             'ubuntu-1404',
-            'ubuntu-1604'
+            'ubuntu-1604',
+            'ubuntu-1804'
     ])

     /** All Windows boxes that we test, which may or may not be supplied **/
@@ -55,22 +55,8 @@ import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.notNullValue;

 /**
- * This class is used to generate the Java Cluster API documentation.
- * You need to wrap your code between two tags like:
- * // tag::example
- * // end::example
- *
- * Where example is your tag name.
- *
- * Then in the documentation, you can extract what is between tag and end tags with
- * ["source","java",subs="attributes,callouts,macros"]
- * --------------------------------------------------
- * include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[example]
- * --------------------------------------------------
- *
- * The column width of the code block is 84. If the code contains a line longer
- * than 84, the line will be cut and a horizontal scroll bar will be displayed.
- * (the code indentation of the tag is not included in the width)
+ * Documentation for Cluster APIs in the high level java client.
+ * Code wrapped in {@code tag} and {@code end} tags is included in the docs.
 */
 public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase {

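The removed javadoc text remains the best description of the snippet mechanism; for reference, a minimal sketch of a tagged test body (the tag name `example` and the request class are illustrative placeholders, not part of this change):

--------------------------------------------------
// tag::example
ClusterHealthRequest request = new ClusterHealthRequest(); // these lines land in the docs
// end::example
--------------------------------------------------

An asciidoc page then pulls in exactly the lines between the two markers with `include-tagged::{doc-tests-file}[example]`.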
@@ -26,7 +26,11 @@ import org.apache.http.HttpResponse;
 import org.apache.http.RequestLine;
 import org.apache.http.StatusLine;

+import java.util.ArrayList;
+import java.util.List;
 import java.util.Objects;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;

 /**
  * Holds an elasticsearch response. It wraps the {@link HttpResponse} returned and associates it with
@@ -96,6 +100,46 @@ public class Response {
         return response.getEntity();
     }

+    private static final Pattern WARNING_HEADER_PATTERN = Pattern.compile(
+            "299 " + // warn code
+            "Elasticsearch-\\d+\\.\\d+\\.\\d+(?:-(?:alpha|beta|rc)\\d+)?(?:-SNAPSHOT)?-(?:[a-f0-9]{7}|Unknown) " + // warn agent
+            "\"((?:\t| |!|[\\x23-\\x5B]|[\\x5D-\\x7E]|[\\x80-\\xFF]|\\\\|\\\\\")*)\" " + // quoted warning value, captured
+            // quoted RFC 1123 date format
+            "\"" + // opening quote
+            "(?:Mon|Tue|Wed|Thu|Fri|Sat|Sun), " + // weekday
+            "\\d{2} " + // 2-digit day
+            "(?:Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) " + // month
+            "\\d{4} " + // 4-digit year
+            "\\d{2}:\\d{2}:\\d{2} " + // (two-digit hour):(two-digit minute):(two-digit second)
+            "GMT" + // GMT
+            "\""); // closing quote
+
+    /**
+     * Returns a list of all warning headers returned in the response.
+     */
+    public List<String> getWarnings() {
+        List<String> warnings = new ArrayList<>();
+        for (Header header : response.getHeaders("Warning")) {
+            String warning = header.getValue();
+            final Matcher matcher = WARNING_HEADER_PATTERN.matcher(warning);
+            if (matcher.matches()) {
+                warnings.add(matcher.group(1));
+                continue;
+            }
+            warnings.add(warning);
+        }
+        return warnings;
+    }
+
+    /**
+     * Returns true if there is at least one warning header returned in the
+     * response.
+     */
+    public boolean hasWarnings() {
+        Header[] warnings = response.getHeaders("Warning");
+        return warnings != null && warnings.length > 0;
+    }
+
     HttpResponse getHttpResponse() {
         return response;
     }
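A short usage sketch of the two new accessors, assuming the `Request`-based `performRequest` overload of the low-level client; the endpoint and the logging are illustrative:

--------------------------------------------------
Response response = restClient.performRequest(new Request("GET", "/_search"));
if (response.hasWarnings()) {
    for (String warning : response.getWarnings()) {
        // a header that matches WARNING_HEADER_PATTERN yields just the quoted
        // warning text; anything else comes back as the raw header value
        System.err.println("deprecation warning: " + warning);
    }
}
--------------------------------------------------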
@@ -58,6 +58,10 @@ public final class ResponseException extends IOException {
                 response.getStatusLine().toString()
         );

+        if (response.hasWarnings()) {
+            message += "\n" + "Warnings: " + response.getWarnings();
+        }
+
         HttpEntity entity = response.getEntity();
         if (entity != null) {
             if (entity.isRepeatable() == false) {
@@ -110,15 +110,17 @@ public class RestClient implements Closeable {
     private final FailureListener failureListener;
     private final NodeSelector nodeSelector;
     private volatile NodeTuple<List<Node>> nodeTuple;
+    private final boolean strictDeprecationMode;

-    RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders,
-            List<Node> nodes, String pathPrefix, FailureListener failureListener, NodeSelector nodeSelector) {
+    RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, List<Node> nodes, String pathPrefix,
+            FailureListener failureListener, NodeSelector nodeSelector, boolean strictDeprecationMode) {
         this.client = client;
         this.maxRetryTimeoutMillis = maxRetryTimeoutMillis;
         this.defaultHeaders = Collections.unmodifiableList(Arrays.asList(defaultHeaders));
         this.failureListener = failureListener;
         this.pathPrefix = pathPrefix;
         this.nodeSelector = nodeSelector;
+        this.strictDeprecationMode = strictDeprecationMode;
         setNodes(nodes);
     }

@@ -296,7 +298,11 @@ public class RestClient implements Closeable {
                 Response response = new Response(request.getRequestLine(), node.getHost(), httpResponse);
                 if (isSuccessfulResponse(statusCode) || ignoreErrorCodes.contains(response.getStatusLine().getStatusCode())) {
                     onResponse(node);
-                    listener.onSuccess(response);
+                    if (strictDeprecationMode && response.hasWarnings()) {
+                        listener.onDefinitiveFailure(new ResponseException(response));
+                    } else {
+                        listener.onSuccess(response);
+                    }
                 } else {
                     ResponseException responseException = new ResponseException(response);
                     if (isRetryStatus(statusCode)) {
@@ -56,6 +56,7 @@ public final class RestClientBuilder {
     private RequestConfigCallback requestConfigCallback;
     private String pathPrefix;
     private NodeSelector nodeSelector = NodeSelector.ANY;
+    private boolean strictDeprecationMode = false;

     /**
      * Creates a new builder instance and sets the hosts that the client will send requests to.
@@ -185,6 +186,15 @@ public final class RestClientBuilder {
         return this;
     }

+    /**
+     * Whether the REST client should return any response containing at least
+     * one warning header as a failure.
+     */
+    public RestClientBuilder setStrictDeprecationMode(boolean strictDeprecationMode) {
+        this.strictDeprecationMode = strictDeprecationMode;
+        return this;
+    }
+
     /**
      * Creates a new {@link RestClient} based on the provided configuration.
      */
@@ -199,7 +209,7 @@ public final class RestClientBuilder {
             }
         });
         RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes,
-                pathPrefix, failureListener, nodeSelector);
+                pathPrefix, failureListener, nodeSelector, strictDeprecationMode);
         httpClient.start();
         return restClient;
     }
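A hedged usage sketch of the new builder flag (host and port are placeholders); with strict deprecation mode enabled, an otherwise successful response that carries a `Warning` header is surfaced to the caller as a `ResponseException` instead of a success:

--------------------------------------------------
RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http"))
        .setStrictDeprecationMode(true) // treat any Warning header as a failure
        .build();
--------------------------------------------------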
@@ -115,7 +115,7 @@ public class RestClientMultipleHostsTests extends RestClientTestCase {
         }
         nodes = Collections.unmodifiableList(nodes);
         failureListener = new HostsTrackingFailureListener();
-        return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector);
+        return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector, false);
     }

     /**
@@ -148,7 +148,7 @@ public class RestClientSingleHostTests extends RestClientTestCase {
         node = new Node(new HttpHost("localhost", 9200));
         failureListener = new HostsTrackingFailureListener();
         restClient = new RestClient(httpClient, 10000, defaultHeaders,
-                singletonList(node), null, failureListener, NodeSelector.ANY);
+                singletonList(node), null, failureListener, NodeSelector.ANY, false);
     }

     /**
@@ -57,7 +57,7 @@ public class RestClientTests extends RestClientTestCase {
     public void testCloseIsIdempotent() throws IOException {
         List<Node> nodes = singletonList(new Node(new HttpHost("localhost", 9200)));
         CloseableHttpAsyncClient closeableHttpAsyncClient = mock(CloseableHttpAsyncClient.class);
-        RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null, null);
+        RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null, null, false);
         restClient.close();
         verify(closeableHttpAsyncClient, times(1)).close();
         restClient.close();
@@ -345,7 +345,7 @@ public class RestClientTests extends RestClientTestCase {
     private static RestClient createRestClient() {
         List<Node> nodes = Collections.singletonList(new Node(new HttpHost("localhost", 9200)));
         return new RestClient(mock(CloseableHttpAsyncClient.class), randomLongBetween(1_000, 30_000),
-                new Header[] {}, nodes, null, null, null);
+                new Header[] {}, nodes, null, null, null, false);
     }

     public void testRoundRobin() throws IOException {
@@ -1,16 +1,22 @@
-[[java-rest-high-cluster-get-settings]]
+--
+:api: get-settings
+:request: ClusterGetSettingsRequest
+:response: ClusterGetSettingsResponse
+--
+
+[id="{upid}-{api}"]
 === Cluster Get Settings API

 The Cluster Get Settings API allows to get the cluster wide settings.

-[[java-rest-high-cluster-get-settings-request]]
+[id="{upid}-{api}-request"]
 ==== Cluster Get Settings Request

-A `ClusterGetSettingsRequest`:
+A +{request}+:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request]
+include-tagged::{doc-tests-file}[{api}-request]
 --------------------------------------------------

 ==== Optional arguments
@@ -18,75 +24,40 @@ The following arguments can optionally be provided:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-includeDefaults]
+include-tagged::{doc-tests-file}[{api}-request-includeDefaults]
 --------------------------------------------------
 <1> By default only those settings that were explicitly set are returned. Setting this to true also returns
 the default settings.

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-local]
+include-tagged::{doc-tests-file}[{api}-request-local]
 --------------------------------------------------
 <1> By default the request goes to the master of the cluster to get the latest results. If local is specified it gets
 the results from whichever node the request goes to.

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-request-masterTimeout]
+include-tagged::{doc-tests-file}[{api}-request-masterTimeout]
 --------------------------------------------------
 <1> Timeout to connect to the master node as a `TimeValue`
 <2> Timeout to connect to the master node as a `String`

-[[java-rest-high-cluster-get-settings-sync]]
-==== Synchronous Execution
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute]
---------------------------------------------------
-<1> Execute the request and get back the response in a `ClusterGetSettingsResponse` object.
-
-[[java-rest-high-cluster-get-settings-async]]
-==== Asynchronous Execution
-
-The asynchronous execution of a cluster get settings requires both the
-`ClusterGetSettingsRequest` instance and an `ActionListener` instance to be
-passed to the asynchronous method:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-async]
---------------------------------------------------
-<1> The `ClusterGetSettingsRequest` to execute and the `ActionListener`
-to use when the execution completes
-
-The asynchronous method does not block and returns immediately. Once it is
-completed the `ActionListener` is called back using the `onResponse` method
-if the execution successfully completed or using the `onFailure` method if
-it failed.
-
-A typical listener for `ClusterGetSettingsResponse` looks like:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-execute-listener]
---------------------------------------------------
-<1> Called when the execution is successfully completed. The response is
-provided as an argument
-<2> Called in case of a failure. The raised exception is provided as an argument
-
-[[java-rest-high-cluster-get-settings-response]]
+include::../execution.asciidoc[]
+
+[id="{upid}-{api}-response"]
 ==== Cluster Get Settings Response

-The returned `ClusterGetSettingsResponse` allows to retrieve information about the
+The returned +{response}+ allows to retrieve information about the
 executed operation as follows:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[get-settings-response]
+include-tagged::{doc-tests-file}[{api}-response]
 --------------------------------------------------
 <1> Get the persistent settings.
 <2> Get the transient settings.
 <3> Get the default settings (returns empty settings if `includeDefaults` was not set to `true`).
 <4> Get the value as a `String` for a particular setting. The order of searching is first in `persistentSettings` then in
 `transientSettings` and finally, if not found in either, in `defaultSettings`.
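The `include-tagged` snippets referenced above live in the test sources, not in this diff; a rough sketch of the request setup they plausibly contain, inferred from the callouts (treat the exact setter names as assumptions):

--------------------------------------------------
ClusterGetSettingsRequest request = new ClusterGetSettingsRequest();
request.includeDefaults(true);   // also report default settings, not just explicit ones
request.local(true);             // answer from the contacted node instead of the master
request.masterNodeTimeout("1m"); // master timeout as a String; a TimeValue overload exists too
--------------------------------------------------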
@@ -1,16 +1,22 @@
-[[java-rest-high-cluster-health]]
+--
+:api: health
+:request: ClusterHealthRequest
+:response: ClusterHealthResponse
+--
+
+[id="{upid}-{api}"]
 === Cluster Health API

 The Cluster Health API allows getting cluster health.

-[[java-rest-high-cluster-health-request]]
+[id="{upid}-{api}-request"]
 ==== Cluster Health Request

-A `ClusterHealthRequest`:
+A +{request}+:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request]
+include-tagged::{doc-tests-file}[{api}-request]
 --------------------------------------------------
 There are no required parameters. By default, the client will check all indices and will not wait
 for any events.
@@ -21,14 +27,14 @@ Indices which should be checked can be passed in the constructor:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-indices-ctr]
+include-tagged::{doc-tests-file}[{api}-request-indices-ctr]
 --------------------------------------------------

 Or using the corresponding setter method:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-indices-setter]
+include-tagged::{doc-tests-file}[{api}-request-indices-setter]
 --------------------------------------------------

 ==== Other parameters
@@ -37,53 +43,53 @@ Other parameters can be passed only through setter methods:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-timeout]
+include-tagged::{doc-tests-file}[{api}-request-timeout]
 --------------------------------------------------
 <1> Timeout for the request as a `TimeValue`. Defaults to 30 seconds
 <2> As a `String`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-master-timeout]
+include-tagged::{doc-tests-file}[{api}-request-master-timeout]
 --------------------------------------------------
 <1> Timeout to connect to the master node as a `TimeValue`. Defaults to the same as `timeout`
 <2> As a `String`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-status]
+include-tagged::{doc-tests-file}[{api}-request-wait-status]
 --------------------------------------------------
 <1> The status to wait (e.g. `green`, `yellow`, or `red`). Accepts a `ClusterHealthStatus` value.
 <2> Using predefined method

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-events]
+include-tagged::{doc-tests-file}[{api}-request-wait-events]
 --------------------------------------------------
 <1> The priority of the events to wait for. Accepts a `Priority` value.

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-level]
+include-tagged::{doc-tests-file}[{api}-request-level]
 --------------------------------------------------
-<1> The level of detail of the returned health information. Accepts a `ClusterHealthRequest.Level` value.
+<1> The level of detail of the returned health information. Accepts a +{request}.Level+ value.
 Default value is `cluster`.

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-relocation]
+include-tagged::{doc-tests-file}[{api}-request-wait-relocation]
 --------------------------------------------------
 <1> Wait for 0 relocating shards. Defaults to `false`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-initializing]
+include-tagged::{doc-tests-file}[{api}-request-wait-initializing]
 --------------------------------------------------
 <1> Wait for 0 initializing shards. Defaults to `false`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-nodes]
+include-tagged::{doc-tests-file}[{api}-request-wait-nodes]
 --------------------------------------------------
 <1> Wait for `N` nodes in the cluster. Defaults to `0`
 <2> Using `>=N`, `<=N`, `>N` and `<N` notation
@@ -91,7 +97,7 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wai

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wait-active]
+include-tagged::{doc-tests-file}[{api}-request-wait-active]
 --------------------------------------------------

 <1> Wait for all shards to be active in the cluster
@@ -99,77 +105,42 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-wai

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-request-local]
+include-tagged::{doc-tests-file}[{api}-request-local]
 --------------------------------------------------
 <1> Non-master node can be used for this request. Defaults to `false`

-[[java-rest-high-cluster-health-sync]]
-==== Synchronous Execution
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-execute]
---------------------------------------------------
-
-[[java-rest-high-cluster-health-async]]
-==== Asynchronous Execution
-
-The asynchronous execution of a cluster health request requires both the
-`ClusterHealthRequest` instance and an `ActionListener` instance to be
-passed to the asynchronous method:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-execute-async]
---------------------------------------------------
-<1> The `ClusterHealthRequest` to execute and the `ActionListener` to use
-when the execution completes
-
-The asynchronous method does not block and returns immediately. Once it is
-completed the `ActionListener` is called back using the `onResponse` method
-if the execution successfully completed or using the `onFailure` method if
-it failed.
-
-A typical listener for `ClusterHealthResponse` looks like:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-execute-listener]
---------------------------------------------------
-<1> Called when the execution is successfully completed. The response is
-provided as an argument
-<2> Called in case of a failure. The raised exception is provided as an argument
-
-[[java-rest-high-cluster-health-response]]
+include::../execution.asciidoc[]
+
+[id="{upid}-{api}-response"]
 ==== Cluster Health Response

-The returned `ClusterHealthResponse` contains the next information about the
+The returned +{response}+ contains the next information about the
 cluster:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-general]
+include-tagged::{doc-tests-file}[{api}-response-general]
 --------------------------------------------------
 <1> Name of the cluster
 <2> Cluster status (`green`, `yellow` or `red`)

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-request-status]
+include-tagged::{doc-tests-file}[{api}-response-request-status]
 --------------------------------------------------
 <1> Whether request was timed out while processing
 <2> Status of the request (`OK` or `REQUEST_TIMEOUT`). Other errors will be thrown as exceptions

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-nodes]
+include-tagged::{doc-tests-file}[{api}-response-nodes]
 --------------------------------------------------
 <1> Number of nodes in the cluster
 <2> Number of data nodes in the cluster

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-shards]
+include-tagged::{doc-tests-file}[{api}-response-shards]
 --------------------------------------------------
 <1> Number of active shards
 <2> Number of primary active shards
@@ -181,7 +152,7 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-sh

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-task]
+include-tagged::{doc-tests-file}[{api}-response-task]
 --------------------------------------------------
 <1> Maximum wait time of all tasks in the queue
 <2> Number of currently pending tasks
@@ -189,18 +160,18 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-ta

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-indices]
+include-tagged::{doc-tests-file}[{api}-response-indices]
 --------------------------------------------------
 <1> Detailed information about indices in the cluster

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-index]
+include-tagged::{doc-tests-file}[{api}-response-index]
 --------------------------------------------------
 <1> Detailed information about a specific index

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[health-response-shard-details]
+include-tagged::{doc-tests-file}[{api}-response-shard-details]
 --------------------------------------------------
 <1> Detailed information about a specific shard
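Again the tagged snippets are elided from this diff; a sketch of a health request exercising the documented options (index names are placeholders; verify the setter signatures against your client version):

--------------------------------------------------
ClusterHealthRequest request = new ClusterHealthRequest("index1", "index2"); // indices in the constructor
request.timeout("50s");                            // request timeout, String or TimeValue
request.masterNodeTimeout("20s");                  // master node timeout
request.waitForStatus(ClusterHealthStatus.YELLOW); // wait for yellow (or better)
request.waitForNodes(">=2");                       // wait for at least two nodes
--------------------------------------------------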
@@ -1,16 +1,22 @@
-[[java-rest-high-cluster-put-settings]]
+--
+:api: put-settings
+:request: ClusterUpdateSettingsRequest
+:response: ClusterUpdateSettingsResponse
+--
+
+[id="{upid}-{api}"]
 === Cluster Update Settings API

 The Cluster Update Settings API allows to update cluster wide settings.

-[[java-rest-high-cluster-put-settings-request]]
+[id="{upid}-{api}-request"]
 ==== Cluster Update Settings Request

-A `ClusterUpdateSettingsRequest`:
+A +{request}+:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request]
+include-tagged::{doc-tests-file}[{api}-request]
 --------------------------------------------------

 ==== Cluster Settings
@@ -18,7 +24,7 @@ At least one setting to be updated must be provided:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request-cluster-settings]
+include-tagged::{doc-tests-file}[{api}-request-cluster-settings]
 --------------------------------------------------
 <1> Sets the transient settings to be applied
 <2> Sets the persistent setting to be applied
@@ -28,26 +34,26 @@ The settings to be applied can be provided in different ways:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-create-settings]
+include-tagged::{doc-tests-file}[{api}-create-settings]
 --------------------------------------------------
 <1> Creates a transient setting as `Settings`
 <2> Creates a persistent setting as `Settings`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-settings-builder]
+include-tagged::{doc-tests-file}[{api}-settings-builder]
 --------------------------------------------------
 <1> Settings provided as `Settings.Builder`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-settings-source]
+include-tagged::{doc-tests-file}[{api}-settings-source]
 --------------------------------------------------
 <1> Settings provided as `String`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-settings-map]
+include-tagged::{doc-tests-file}[{api}-settings-map]
 --------------------------------------------------
 <1> Settings provided as a `Map`
@@ -56,7 +62,7 @@ The following arguments can optionally be provided:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request-timeout]
+include-tagged::{doc-tests-file}[{api}-request-timeout]
 --------------------------------------------------
 <1> Timeout to wait for the all the nodes to acknowledge the settings were applied
 as a `TimeValue`
@@ -65,58 +71,23 @@ as a `String`

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-request-masterTimeout]
+include-tagged::{doc-tests-file}[{api}-request-masterTimeout]
 --------------------------------------------------
 <1> Timeout to connect to the master node as a `TimeValue`
 <2> Timeout to connect to the master node as a `String`

-[[java-rest-high-cluster-put-settings-sync]]
-==== Synchronous Execution
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-execute]
---------------------------------------------------
-
-[[java-rest-high-cluster-put-settings-async]]
-==== Asynchronous Execution
-
-The asynchronous execution of a cluster update settings requires both the
-`ClusterUpdateSettingsRequest` instance and an `ActionListener` instance to be
-passed to the asynchronous method:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-execute-async]
---------------------------------------------------
-<1> The `ClusterUpdateSettingsRequest` to execute and the `ActionListener`
-to use when the execution completes
-
-The asynchronous method does not block and returns immediately. Once it is
-completed the `ActionListener` is called back using the `onResponse` method
-if the execution successfully completed or using the `onFailure` method if
-it failed.
-
-A typical listener for `ClusterUpdateSettingsResponse` looks like:
-
-["source","java",subs="attributes,callouts,macros"]
---------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-execute-listener]
---------------------------------------------------
-<1> Called when the execution is successfully completed. The response is
-provided as an argument
-<2> Called in case of a failure. The raised exception is provided as an argument
-
-[[java-rest-high-cluster-put-settings-response]]
+include::../execution.asciidoc[]
+
+[id="{upid}-{api}-response"]
 ==== Cluster Update Settings Response

-The returned `ClusterUpdateSettings` allows to retrieve information about the
+The returned +{response}+ allows to retrieve information about the
 executed operation as follows:

 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
-include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-settings-response]
+include-tagged::{doc-tests-file}[{api}-response]
 --------------------------------------------------
 <1> Indicates whether all of the nodes have acknowledged the request
 <2> Indicates which transient settings have been applied
 <3> Indicates which persistent settings have been applied
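A sketch of how the documented settings variants might be combined (the setting keys and values are illustrative only):

--------------------------------------------------
ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
request.transientSettings(Settings.builder()                  // <1> transient settings
        .put("indices.recovery.max_bytes_per_sec", "50mb")    // an illustrative setting
        .build());
request.persistentSettings(                                   // <2> persistent settings, here as a Map
        Collections.singletonMap("cluster.routing.allocation.enable", "none"));
--------------------------------------------------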
docs/java-rest/high-level/execution.asciidoc (new file, 48 lines)
@@ -0,0 +1,48 @@
+////
+This file is included by every high level rest client API documentation page
+so we don't have to copy and paste the same asciidoc over and over again. We
+*do* have to copy and paste the same Java tests over and over again. For now
+this is intentional because it forces us to *write* and execute the tests
+which, while a bit ceremonial, does force us to cover these calls in *some*
+test.
+////
+
+[id="{upid}-{api}-sync"]
+==== Synchronous Execution
+
+When executing a +{request}+ in the following manner, the client waits
+for the +{response}+ to be returned before continuing with code execution:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-execute]
+--------------------------------------------------
+
+[id="{upid}-{api}-async"]
+==== Asynchronous Execution
+
+Executing a +{request}+ can also be done in an asynchronous fashion so that
+the client can return directly. Users need to specify how the response or
+potential failures will be handled by passing the request and a listener to the
+asynchronous {api} method:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-execute-async]
+--------------------------------------------------
+<1> The +{request}+ to execute and the `ActionListener` to use when
+the execution completes
+
+The asynchronous method does not block and returns immediately. Once it is
+completed the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A typical listener for +{response}+ looks like:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests-file}[{api}-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed.
+<2> Called when the whole +{request}+ fails.
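Concretely, for the cluster health API the two modes this shared page describes look roughly like the following; this is a sketch, and the exact `health`/`healthAsync` overloads (for example whether they take a `RequestOptions` argument) vary across 6.x releases:

--------------------------------------------------
// synchronous: blocks until the ClusterHealthResponse arrives
ClusterHealthResponse response = client.cluster().health(request, RequestOptions.DEFAULT);

// asynchronous: returns immediately and reports through the listener
client.cluster().healthAsync(request, RequestOptions.DEFAULT,
        new ActionListener<ClusterHealthResponse>() {
            @Override
            public void onResponse(ClusterHealthResponse healthResponse) {
                // <1> called on success, with the response as argument
            }

            @Override
            public void onFailure(Exception e) {
                // <2> called when the whole request fails
            }
        });
--------------------------------------------------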
@@ -1,4 +1,6 @@
-[[java-rest-high]]
+:mainid: java-rest-high
+
+[id="{mainid}"]
 = Java High Level REST Client

 [partintro]
@@ -31,3 +33,4 @@ include::migration.asciidoc[]
 include::../license.asciidoc[]

 :doc-tests!:
+:mainid!:
@@ -10,7 +10,7 @@ The query builders are used to create the query to execute within a search reque
 is a query builder for every type of query supported by the Query DSL. Each query builder
 implements the `QueryBuilder` interface and allows to set the specific options for a given
 type of query. Once created, the `QueryBuilder` object can be set as the query parameter of
-`SearchSourceBuilder`. The <<java-rest-high-document-search-request-building-queries, Search Request>>
+`SearchSourceBuilder`. The <<java-rest-high-search-request-building-queries, Search Request>>
 page shows an example of how to build a full search request using `SearchSourceBuilder` and
 `QueryBuilder` objects. The <<java-rest-high-query-builders, Building Search Queries>> page
 gives a list of all available search queries with their corresponding `QueryBuilder` objects
@@ -24,7 +24,7 @@ aggregation (or pipeline aggregation) supported by Elasticsearch. All builders e
 `AggregationBuilder` class (or `PipelineAggregationBuilder`class). Once created, `AggregationBuilder`
 objects can be set as the aggregation parameter of `SearchSourceBuilder`. There is a example
 of how `AggregationBuilder` objects are used with `SearchSourceBuilder` objects to define the aggregations
-to compute with a search query in <<java-rest-high-document-search-request-building-aggs, Search Request>> page.
+to compute with a search query in <<java-rest-high-search-request-building-aggs, Search Request>> page.
 The <<java-rest-high-aggregation-builders, Building Aggregations>> page gives a list of all available
 aggregations with their corresponding `AggregationBuilder` objects and `AggregationBuilders` helper methods.
@ -1,10 +1,16 @@
--
:api: search
:request: SearchRequest
:response: SearchResponse
--

[id="{upid}-{api}"]
=== Search API

[id="{upid}-{api}-request"]
==== Search Request

The +{request}+ is used for any operation that has to do with searching
documents, aggregations, and suggestions, and also offers ways of requesting
highlighting on the resulting documents.

@ -12,7 +18,7 @@ In its most basic form, we can add a query to the request:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-basic]
--------------------------------------------------

<1> Creates the `SearchRequest`. Without arguments this runs against all indices.
@ -20,14 +26,14 @@ include-tagged::{doc-tests}/SearchDocumentationIT.java[search-request-basic]
<3> Add a `match_all` query to the `SearchSourceBuilder`.
<4> Add the `SearchSourceBuilder` to the `SearchRequest`.
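
For illustration, the whole flow can also be sketched inline. This is a
minimal, hand-written example rather than a snippet from the documentation
tests:

["source","java"]
--------------------------------------------------
// Build a request that runs against all indices, give it a source with a
// match_all query, and attach the source to the request.
SearchRequest searchRequest = new SearchRequest();
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
searchSourceBuilder.query(QueryBuilders.matchAllQuery());
searchRequest.source(searchSourceBuilder);
--------------------------------------------------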

[id="{upid}-{api}-request-optional"]
===== Optional arguments

Let's first look at some of the optional arguments of a +{request}+:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-indices-types]
--------------------------------------------------
<1> Restricts the request to an index
<2> Limits the request to a type
@ -36,20 +42,20 @@ There are a couple of other interesting optional parameters:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-routing]
--------------------------------------------------
<1> Set a routing parameter

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-indicesOptions]
--------------------------------------------------
<1> Setting `IndicesOptions` controls how unavailable indices are resolved and
how wildcard expressions are expanded

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-preference]
--------------------------------------------------
<1> Use the preference parameter e.g. to prefer local shards when executing
the search. The default is to randomize across shards.
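
Taken together, the optional arguments above can be sketched as follows. The
index name `posts` and type `doc` are placeholders, not values taken from the
tests:

["source","java"]
--------------------------------------------------
SearchRequest searchRequest = new SearchRequest("posts");          // restrict to one index
searchRequest.types("doc");                                        // limit to one type
searchRequest.routing("routing");                                  // set a routing parameter
searchRequest.indicesOptions(IndicesOptions.lenientExpandOpen());  // wildcard handling
searchRequest.preference("_local");                                // prefer local shards
--------------------------------------------------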
@ -65,7 +71,7 @@ Here are a few examples of some common options:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-source-basics]
--------------------------------------------------
<1> Create a `SearchSourceBuilder` with default options.
<2> Set the query. Can be any type of `QueryBuilder`
@ -77,14 +83,14 @@ Defaults to 10.
take.

After this, the `SearchSourceBuilder` only needs to be added to the
+{request}+:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-source-setter]
--------------------------------------------------

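A minimal inline sketch of these options, with illustrative values for `from`,
`size` and the timeout:

["source","java"]
--------------------------------------------------
SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
sourceBuilder.query(QueryBuilders.termQuery("user", "kimchy"));
sourceBuilder.from(0);                                      // start from the first hit
sourceBuilder.size(5);                                      // return at most 5 hits
sourceBuilder.timeout(new TimeValue(60, TimeUnit.SECONDS)); // give up after 60 seconds
searchRequest.source(sourceBuilder);
--------------------------------------------------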

[id="{upid}-{api}-request-building-queries"]
===== Building queries

Search queries are created using `QueryBuilder` objects. A `QueryBuilder` exists
@ -94,7 +100,7 @@ A `QueryBuilder` can be created using its constructor:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-query-builder-ctor]
--------------------------------------------------
<1> Create a full text {ref}/query-dsl-match-query.html[Match Query] that matches
the text "kimchy" over the field "user".
@ -104,7 +110,7 @@ of the search query it creates:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-query-builder-options]
--------------------------------------------------
<1> Enable fuzzy matching on the match query
<2> Set the prefix length option on the match query
@ -117,7 +123,7 @@ This class provides helper methods that can be used to create `QueryBuilder` obj

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-query-builders]
--------------------------------------------------

Whatever the method used to create it, the `QueryBuilder` object must be added
@ -125,10 +131,10 @@ to the `SearchSourceBuilder` as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-query-setter]
--------------------------------------------------

The <<{upid}-query-builders, Building Queries>> page gives a list of all available search queries with
their corresponding `QueryBuilder` objects and `QueryBuilders` helper methods.

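Both styles can be sketched side by side. This inline example mirrors the
match query on the "user" field used throughout this page and is illustrative
only:

["source","java"]
--------------------------------------------------
// Constructor plus individual setters...
MatchQueryBuilder matchQueryBuilder = new MatchQueryBuilder("user", "kimchy");
matchQueryBuilder.fuzziness(Fuzziness.AUTO);
matchQueryBuilder.prefixLength(3);
matchQueryBuilder.maxExpansions(10);

// ...or the fluent QueryBuilders helper, then set the query on the source.
QueryBuilder queryBuilder = QueryBuilders.matchQuery("user", "kimchy")
        .fuzziness(Fuzziness.AUTO)
        .prefixLength(3)
        .maxExpansions(10);
searchSourceBuilder.query(queryBuilder);
--------------------------------------------------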
@ -138,7 +144,7 @@ The `SearchSourceBuilder` allows adding one or more `SortBuilder` instances. The

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-source-sorting]
--------------------------------------------------
<1> Sort descending by `_score` (the default)
<2> Also sort ascending by the `_id` field
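
An inline sketch of the two sorts described by the callouts:

["source","java"]
--------------------------------------------------
sourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC));     // by _score, descending
sourceBuilder.sort(new FieldSortBuilder("_id").order(SortOrder.ASC)); // then by _id, ascending
--------------------------------------------------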
@ -149,17 +155,17 @@ By default, search requests return the contents of the document `_source` but li

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-source-filtering-off]
--------------------------------------------------

The method also accepts an array of one or more wildcard patterns to control
which fields get included or excluded in a more fine-grained way:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-source-filtering-includes]
--------------------------------------------------
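
For example, both variants could be sketched like this; the field names are
placeholders:

["source","java"]
--------------------------------------------------
sourceBuilder.fetchSource(false); // turn off _source retrieval entirely

// Or include and exclude fields using wildcard patterns.
String[] includeFields = new String[] {"title", "innerObject.*"};
String[] excludeFields = new String[] {"user"};
sourceBuilder.fetchSource(includeFields, excludeFields);
--------------------------------------------------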

[id="{upid}-{api}-request-highlighting"]
===== Requesting Highlighting

Highlighting search results can be achieved by setting a `HighlightBuilder` on the
@ -168,7 +174,7 @@ fields by adding one or more `HighlightBuilder.Field` instances to a `HighlightB

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-highlighting]
--------------------------------------------------
<1> Creates a new `HighlightBuilder`
<2> Create a field highlighter for the `title` field
@ -179,9 +185,9 @@ There are many options which are explained in detail in the Rest API documentati
API parameters (e.g. `pre_tags`) are usually changed by
setters with a similar name (e.g. `#preTags(String ...)`).

Highlighted text fragments can <<{upid}-{api}-response-highlighting,later be retrieved>> from the +{response}+.

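A hand-written sketch of requesting highlighting on a `title` field (the field
name and highlighter type are illustrative):

["source","java"]
--------------------------------------------------
HighlightBuilder highlightBuilder = new HighlightBuilder();
HighlightBuilder.Field highlightTitle = new HighlightBuilder.Field("title");
highlightTitle.highlighterType("unified"); // pick a highlighter implementation
highlightBuilder.field(highlightTitle);    // register the field highlighter
searchSourceBuilder.highlighter(highlightBuilder);
--------------------------------------------------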

[id="{upid}-{api}-request-building-aggs"]
===== Requesting Aggregations

Aggregations can be added to the search by first creating the appropriate
@ -191,13 +197,13 @@ sub-aggregation on the average age of employees in the company:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-aggregations]
--------------------------------------------------

The <<{upid}-aggregation-builders, Building Aggregations>> page gives a list of all available aggregations with
their corresponding `AggregationBuilder` objects and `AggregationBuilders` helper methods.

We will later see how to <<{upid}-{api}-response-aggs,access aggregations>> in the +{response}+.

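The `by_company` terms aggregation with an `average_age` sub-aggregation that
this page refers to could be sketched as follows (the field names are
placeholders):

["source","java"]
--------------------------------------------------
TermsAggregationBuilder aggregation = AggregationBuilders.terms("by_company")
        .field("company.keyword");
aggregation.subAggregation(AggregationBuilders.avg("average_age")
        .field("age"));
searchSourceBuilder.aggregation(aggregation);
--------------------------------------------------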

===== Requesting Suggestions

@ -207,14 +213,14 @@ need to be added to the top level `SuggestBuilder`, which itself can be set on t

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-suggestion]
--------------------------------------------------
<1> Creates a new `TermSuggestionBuilder` for the `user` field and
the text `kmichy`
<2> Adds the suggestion builder and names it `suggest_user`

We will later see how to <<{upid}-{api}-response-suggestions,retrieve suggestions>> from the
+{response}+.

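A sketch of the term suggestion described by the callouts above:

["source","java"]
--------------------------------------------------
TermSuggestionBuilder termSuggestionBuilder =
        SuggestBuilders.termSuggestion("user").text("kmichy");
SuggestBuilder suggestBuilder = new SuggestBuilder();
suggestBuilder.addSuggestion("suggest_user", termSuggestionBuilder);
searchSourceBuilder.suggest(suggestBuilder);
--------------------------------------------------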

===== Profiling Queries and Aggregations

@ -223,56 +229,18 @@ a specific search request. In order to use it, the profile flag must be set to t

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-profiling]
--------------------------------------------------

Once the +{request}+ is executed the corresponding +{response}+ will
<<{upid}-{api}-response-profile,contain the profiling results>>.

include::../execution.asciidoc[]

[id="{upid}-{api}-response"]
==== {response}

The +{response}+ that is returned by executing the search provides details
about the search execution itself as well as access to the documents returned.
First, there is useful information about the request execution itself, like the
HTTP status code, execution time or whether the request terminated early or timed
@ -280,7 +248,7 @@ out:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response-1]
--------------------------------------------------

Second, the response also provides information about the execution on the
@ -291,10 +259,10 @@ failures can also be handled by iterating over an array of

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-response-2]
--------------------------------------------------

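Inline, the information from these two snippets amounts to something like the
following sketch:

["source","java"]
--------------------------------------------------
RestStatus status = searchResponse.status();            // HTTP status code
TimeValue took = searchResponse.getTook();              // execution time
Boolean terminatedEarly = searchResponse.isTerminatedEarly();
boolean timedOut = searchResponse.isTimedOut();

// Shard-level bookkeeping, including any partial failures.
int totalShards = searchResponse.getTotalShards();
int successfulShards = searchResponse.getSuccessfulShards();
int failedShards = searchResponse.getFailedShards();
for (ShardSearchFailure failure : searchResponse.getShardFailures()) {
    // inspect or log each shard failure here
}
--------------------------------------------------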

[id="{upid}-{api}-response-search-hits"]
===== Retrieving SearchHits

To get access to the returned documents, we need to first get the `SearchHits`
@ -302,7 +270,7 @@ contained in the response:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-hits-get]
--------------------------------------------------

The `SearchHits` provides global information about all hits, like total number
@ -310,7 +278,7 @@ of hits or the maximum score:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-hits-info]
--------------------------------------------------

Nested inside the `SearchHits` are the individual search results that can
@ -319,7 +287,7 @@ be iterated over:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-hits-singleHit]
--------------------------------------------------

The `SearchHit` provides access to basic information like index, type, docId and
@ -327,7 +295,7 @@ score of each search hit:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-hits-singleHit-properties]
--------------------------------------------------

Furthermore, it lets you get back the document source, either as a simple
@ -338,34 +306,34 @@ cases need to be cast accordingly:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-hits-singleHit-source]
--------------------------------------------------

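Pulling the last few snippets together, iterating over hits could be sketched
like this:

["source","java"]
--------------------------------------------------
SearchHits hits = searchResponse.getHits();
long totalHits = hits.getTotalHits();
float maxScore = hits.getMaxScore();
for (SearchHit hit : hits.getHits()) {
    String index = hit.getIndex();
    String id = hit.getId();
    float score = hit.getScore();
    String sourceAsString = hit.getSourceAsString();        // the source as JSON
    Map<String, Object> sourceAsMap = hit.getSourceAsMap(); // the source as a map
}
--------------------------------------------------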

[id="{upid}-{api}-response-highlighting"]
===== Retrieving Highlighting

If <<{upid}-{api}-request-highlighting,requested>>, highlighted text fragments can be retrieved from each `SearchHit` in the result. The hit object offers
access to a map of field names to `HighlightField` instances, each of which contains one
or many highlighted text fragments:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-highlighting-get]
--------------------------------------------------
<1> Get the highlighting for the `title` field
<2> Get one or many fragments containing the highlighted field content

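A sketch of reading the `title` highlighting back from each hit:

["source","java"]
--------------------------------------------------
for (SearchHit hit : searchResponse.getHits().getHits()) {
    Map<String, HighlightField> highlightFields = hit.getHighlightFields();
    HighlightField highlight = highlightFields.get("title"); // highlighting for "title"
    Text[] fragments = highlight.fragments();                // one or many fragments
    String fragmentString = fragments[0].string();
}
--------------------------------------------------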

[id="{upid}-{api}-response-aggs"]
===== Retrieving Aggregations

Aggregations can be retrieved from the +{response}+ by first getting the
root of the aggregation tree, the `Aggregations` object, and then getting the
aggregation by name.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-aggregations-get]
--------------------------------------------------
<1> Get the `by_company` terms aggregation
<2> Get the bucket that is keyed with `Elastic`
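
Sketched inline, retrieving the `by_company` aggregation and its `average_age`
sub-aggregation looks roughly like this:

["source","java"]
--------------------------------------------------
Aggregations aggregations = searchResponse.getAggregations();
Terms byCompanyAggregation = aggregations.get("by_company"); // look up by name
Terms.Bucket elasticBucket = byCompanyAggregation.getBucketByKey("Elastic");
Avg averageAge = elasticBucket.getAggregations().get("average_age");
double avg = averageAge.getValue();
--------------------------------------------------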
@ -377,7 +345,7 @@ otherwise a `ClassCastException` will be thrown:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[search-request-aggregations-get-wrongCast]
--------------------------------------------------
<1> This will throw an exception because "by_company" is a `terms` aggregation
but we try to retrieve it as a `range` aggregation
@ -388,14 +356,14 @@ needs to happen explicitly:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-aggregations-asMap]
--------------------------------------------------

There are also getters that return all top level aggregations as a list:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-aggregations-asList]
--------------------------------------------------

And last but not least you can iterate over all aggregations and then e.g.
@ -403,17 +371,17 @@ decide how to further process them based on their type:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-aggregations-iterator]
--------------------------------------------------

[id="{upid}-{api}-response-suggestions"]
===== Retrieving Suggestions

To get back the suggestions from a +{response}+, use the `Suggest` object as an entry point and then retrieve the nested suggestion objects:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-suggestion-get]
--------------------------------------------------
<1> Use the `Suggest` class to access suggestions
<2> Suggestions can be retrieved by name. You need to assign them to the correct
@ -421,21 +389,21 @@ type of Suggestion class (here `TermSuggestion`), otherwise a `ClassCastExceptio
<3> Iterate over the suggestion entries
<4> Iterate over the options in one entry

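The navigation in the callouts can be sketched as:

["source","java"]
--------------------------------------------------
Suggest suggest = searchResponse.getSuggest();
TermSuggestion termSuggestion = suggest.getSuggestion("suggest_user");
for (TermSuggestion.Entry entry : termSuggestion.getEntries()) { // the entries
    for (TermSuggestion.Entry.Option option : entry) {           // the options
        String suggestText = option.getText().string();
    }
}
--------------------------------------------------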

[id="{upid}-{api}-response-profile"]
===== Retrieving Profiling Results

Profiling results are retrieved from a +{response}+ using the `getProfileResults()` method. This
method returns a `Map` containing a `ProfileShardResult` object for every shard involved in the
+{request}+ execution. `ProfileShardResult` objects are stored in the `Map` using a key that uniquely
identifies the shard the profile result corresponds to.

Here is some sample code that shows how to iterate over all the profiling results of every shard:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-profiling-get]
--------------------------------------------------
<1> Retrieve the `Map` of `ProfileShardResult` from the +{response}+
<2> Profiling results can be retrieved by shard's key if the key is known, otherwise it might be simpler
to iterate over all the profiling results
<3> Retrieve the key that identifies which shard the `ProfileShardResult` belongs to
@ -446,7 +414,7 @@ executed against the underlying Lucene index:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-profiling-queries]
--------------------------------------------------
<1> Retrieve the list of `QueryProfileShardResult`
<2> Iterate over each `QueryProfileShardResult`
@ -456,7 +424,7 @@ Each `QueryProfileShardResult` gives access to the detailed query tree execution

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-profiling-queries-results]
--------------------------------------------------
<1> Iterate over the profile results
<2> Retrieve the name of the Lucene query
@ -470,7 +438,7 @@ The `QueryProfileShardResult` also gives access to the profiling information for

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-profiling-queries-collectors]
--------------------------------------------------
<1> Retrieve the profiling result of the Lucene collector
<2> Retrieve the name of the Lucene collector
@ -485,7 +453,7 @@ to the detailed aggregations tree execution:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests-file}[{api}-request-profiling-aggs]
--------------------------------------------------
<1> Retrieve the `AggregationProfileShardResult`
<2> Iterate over the aggregation profile results
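
As a closing sketch, the entry point into the profiling results described in
this section:

["source","java"]
--------------------------------------------------
Map<String, ProfileShardResult> profilingResults =
        searchResponse.getProfileResults();
for (Map.Entry<String, ProfileShardResult> profilingResult : profilingResults.entrySet()) {
    String key = profilingResult.getKey(); // uniquely identifies the shard
    ProfileShardResult profileShardResult = profilingResult.getValue();
}
--------------------------------------------------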

@ -46,6 +46,8 @@ The Java High Level REST Client supports the following Search APIs:
* <<java-rest-high-rank-eval>>
* <<java-rest-high-explain>>

:upid: {mainid}
:doc-tests-file: {doc-tests}/SearchDocumentationIT.java
include::search/search.asciidoc[]
include::search/scroll.asciidoc[]
include::search/multi-search.asciidoc[]
@ -137,6 +139,8 @@ The Java High Level REST Client supports the following Cluster APIs:
* <<java-rest-high-cluster-get-settings>>
* <<java-rest-high-cluster-health>>

:upid: {mainid}-cluster
:doc-tests-file: {doc-tests}/ClusterClientDocumentationIT.java
include::cluster/put_settings.asciidoc[]
include::cluster/get_settings.asciidoc[]
include::cluster/health.asciidoc[]
@ -309,3 +313,15 @@ The Java High Level REST Client supports the following Graph APIs:
* <<java-rest-high-x-pack-graph-explore>>

include::graph/explore.asciidoc[]

////
Clear the attributes that we use to document the APIs included above so they
don't leak into the rest of the documentation.
////
--
:api!:
:request!:
:response!:
:doc-tests-file!:
:upid!:
--
@ -22,7 +22,7 @@ This API provides a starting point for ingesting data into {es} in a format that
is suitable for subsequent use with other {ml} functionality.

Unlike other {es} endpoints, the data that is posted to this endpoint does not
need to be UTF-8 encoded and in JSON format. It must, however, be text; binary
file formats are not currently supported.

The response from the API contains:
@ -122,6 +122,11 @@ to request analysis of 100000 lines to achieve some variety.
is not specified and the delimiter is pipe (`|`), the default value is `true`.
Otherwise, the default value is `false`.

`timeout`::
(time) Sets the maximum amount of time that the structure analysis may take.
If the analysis is still running when the timeout expires then it will be
aborted. The default value is 25 seconds.

`timestamp_field`::
(string) The name of the field that contains the primary timestamp of each
record in the file. In particular, if the file were ingested into an index,
@ -197,7 +202,7 @@ the formats it knows, which are these Joda formats and their Java time equivalen

The text file that you want to analyze. It must contain data that is suitable to
be ingested into {es}. It does not need to be in JSON format and it does not
need to be UTF-8 encoded. The size is limited to the {es} HTTP receive buffer
size, which defaults to 100 Mb.

@ -245,6 +250,7 @@ POST _xpack/ml/find_file_structure
// TEST

If the request does not encounter errors, you receive the following result:

[source,js]
----
{
@ -483,7 +489,7 @@ If the request does not encounter errors, you receive the following result:
`keyword` type as it is not considered specific enough to convert to the
`date` type.
<9> `field_stats` contains the most common values of each field, plus basic
numeric statistics for the numeric `page_count` field. This information
may provide clues that the data needs to be cleaned or transformed prior
to use by other {ml} functionality.

@ -502,11 +508,12 @@ curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head

--
NOTE: The `Content-Type: application/json` header must be set even though in
this case the data is not JSON. (Alternatively the `Content-Type` can be set
to any other type supported by Elasticsearch, but it must be set.)
--

If the request does not encounter errors, you receive the following result:

[source,js]
----
{
@ -1269,9 +1276,405 @@ If the request does not encounter errors, you receive the following result:
|
|||||||
<8> `joda_timestamp_formats` are used to tell Logstash and Ingest pipeline how
|
<8> `joda_timestamp_formats` are used to tell Logstash and Ingest pipeline how
|
||||||
to parse timestamps.
|
to parse timestamps.
|
||||||
<9> `java_timestamp_formats` are the Java time formats recognized in the time
|
<9> `java_timestamp_formats` are the Java time formats recognized in the time
|
||||||
fields. In future Ingest pipeline will switch to use this format.
|
fields. In future Ingest pipeline will switch to use this format.
|
||||||
<10> The timestamp format in this sample doesn't specify a timezone, so to
|
<10> The timestamp format in this sample doesn't specify a timezone, so to
|
||||||
accurately convert them to UTC timestamps to store in Elasticsearch it's
|
accurately convert them to UTC timestamps to store in Elasticsearch it's
|
||||||
necessary to supply the timezone they relate to. `need_client_timezone`
|
necessary to supply the timezone they relate to. `need_client_timezone`
|
||||||
will be `false` for timestamp formats that include the timezone.
|
will be `false` for timestamp formats that include the timezone.
|
||||||
|
|
||||||
|
If you try to analyze a lot of data then the analysis will take a long time.
|
||||||
|
If you want to limit the amount of processing your {es} cluster performs for
|
||||||
|
a request, use the timeout query parameter. The analysis will be aborted and
|
||||||
|
an error returned when the timeout expires. For example, you can replace 20000
|
||||||
|
lines in the previous example with 200000 and set a 1 second timeout on the
|
||||||
|
analysis:
|
||||||
|
|
||||||
|
[source,js]
|
||||||
|
----
|
||||||
|
curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -200000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&lines_to_sample=200000&timeout=1s" -T -
|
||||||
|
----
|
||||||
|
// NOTCONSOLE
|
||||||
|
// Not converting to console because this shows how curl can be used
|
||||||
|
|
||||||
|
Unless you are using an incredibly fast computer you'll receive a timeout error:
|
||||||
|
|
||||||
|
[source,js]
|
||||||
|
----
|
||||||
|
{
|
||||||
|
"error" : {
|
||||||
|
"root_cause" : [
|
||||||
|
{
|
||||||
|
"type" : "timeout_exception",
|
||||||
|
"reason" : "Aborting structure analysis during [delimited record parsing] as it has taken longer than the timeout of [1s]"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"type" : "timeout_exception",
|
||||||
|
"reason" : "Aborting structure analysis during [delimited record parsing] as it has taken longer than the timeout of [1s]"
|
||||||
|
},
|
||||||
|
"status" : 500
|
||||||
|
}
|
||||||
|
----
|
||||||
|
// NOTCONSOLE
|
||||||
|
|
||||||
|
--
|
||||||
|
NOTE: If you try the example above yourself you will note that the overall
|
||||||
|
running time of the `curl` commands is considerably longer than 1 second. This
|
||||||
|
is because it takes a while to download 200000 lines of CSV from the internet,
|
||||||
|
and the timeout is measured from the time this endpoint starts to process the
|
||||||
|
data.
|
||||||
|
--
|
||||||
|
|
||||||
|
This is an example of analyzing {es}'s own log file:
|
||||||
|
|
||||||
|
[source,js]
|
||||||
|
----
|
||||||
|
curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty" -T "$ES_HOME/logs/elasticsearch.log"
|
||||||
|
----
|
||||||
|
// NOTCONSOLE
|
||||||
|
// Not converting to console because this shows how curl can be used
|
||||||
|
|
||||||
|
If the request does not encounter errors, the result will look something like
|
||||||
|
this:
|
||||||
|
|
||||||
|
[source,js]
|
||||||
|
----
|
||||||
|
{
|
||||||
|
"num_lines_analyzed" : 53,
|
||||||
|
"num_messages_analyzed" : 53,
|
||||||
|
"sample_start" : "[2018-09-27T14:39:28,518][INFO ][o.e.e.NodeEnvironment ] [node-0] using [1] data paths, mounts [[/ (/dev/disk1)]], net usable_space [165.4gb], net total_space [464.7gb], types [hfs]\n[2018-09-27T14:39:28,521][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [494.9mb], compressed ordinary object pointers [true]\n",
|
||||||
|
"charset" : "UTF-8",
|
||||||
|
"has_byte_order_marker" : false,
|
||||||
|
"format" : "semi_structured_text", <1>
|
||||||
|
"multiline_start_pattern" : "^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}", <2>
|
||||||
|
"grok_pattern" : "\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel}.*", <3>
|
||||||
|
"timestamp_field" : "timestamp",
|
||||||
|
"joda_timestamp_formats" : [
|
||||||
|
"ISO8601"
|
||||||
|
],
|
||||||
|
"java_timestamp_formats" : [
|
||||||
|
"yyyy-MM-dd'T'HH:mm:ss,SSS"
|
||||||
|
],
|
||||||
|
"need_client_timezone" : true,
|
||||||
|
"mappings" : {
|
||||||
|
"@timestamp" : {
|
||||||
|
"type" : "date"
|
||||||
|
},
|
||||||
|
"loglevel" : {
|
||||||
|
"type" : "keyword"
|
||||||
|
},
|
||||||
|
"message" : {
|
||||||
|
"type" : "text"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"field_stats" : {
|
||||||
|
"loglevel" : {
|
||||||
|
"count" : 53,
|
||||||
|
"cardinality" : 3,
|
||||||
|
"top_hits" : [
|
||||||
|
{
|
||||||
|
"value" : "INFO",
|
||||||
|
"count" : 51
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "DEBUG",
|
||||||
|
"count" : 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "WARN",
|
||||||
|
"count" : 1
|
||||||
|
}
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"timestamp" : {
|
||||||
|
"count" : 53,
|
||||||
|
"cardinality" : 28,
|
||||||
|
"top_hits" : [
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:29,859",
|
||||||
|
"count" : 10
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:29,860",
|
||||||
|
"count" : 9
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:29,858",
|
||||||
|
"count" : 6
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:28,523",
|
||||||
|
"count" : 3
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:34,234",
|
||||||
|
"count" : 2
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:28,518",
|
||||||
|
"count" : 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:28,521",
|
||||||
|
"count" : 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:28,522",
|
||||||
|
"count" : 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:29,861",
|
||||||
|
"count" : 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"value" : "2018-09-27T14:39:32,786",
|
||||||
|
"count" : 1
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
----
|
||||||
|
// NOTCONSOLE
|
||||||
|
|
||||||
|
<1> This time the `format` has been identified as `semi_structured_text`.
|
||||||
|
<2> The `multiline_start_pattern` is set on the basis that the timestamp appears
|
||||||
|
in the first line of each multi-line log message.
|
||||||
|
<3> A very simple `grok_pattern` has been created, which extracts the timestamp
|
||||||
|
and recognizable fields that appear in every analyzed message. In this case
|
||||||
|
the only field that was recognized beyond the timestamp was the log level.
|
||||||
|
|
||||||
|
If you recognize more fields than the simple `grok_pattern` produced by the
|
||||||
|
structure finder unaided then you can resubmit the request specifying a more
|
||||||
|
advanced `grok_pattern` as a query parameter and the structure finder will
|
||||||
|
calculate `field_stats` for your additional fields.
|
||||||
|
|
||||||
|
In the case of the {es} log a more complete Grok pattern is
|
||||||
|
`\[%{TIMESTAMP_ISO8601:timestamp}\]\[%{LOGLEVEL:loglevel} *\]\[%{JAVACLASS:class} *\] \[%{HOSTNAME:node}\] %{JAVALOGMESSAGE:message}`.
|
||||||
|
You can analyze the same log file again, submitting this `grok_pattern` as a
|
||||||
|
query parameter (appropriately URL escaped):
|
||||||
|
|
||||||
|
[source,js]
|
||||||
|
----
|
||||||
|
curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&format=semi_structured_text&grok_pattern=%5C%5B%25%7BTIMESTAMP_ISO8601:timestamp%7D%5C%5D%5C%5B%25%7BLOGLEVEL:loglevel%7D%20*%5C%5D%5C%5B%25%7BJAVACLASS:class%7D%20*%5C%5D%20%5C%5B%25%7BHOSTNAME:node%7D%5C%5D%20%25%7BJAVALOGMESSAGE:message%7D" -T "$ES_HOME/logs/elasticsearch.log"
|
||||||
|
----
|
||||||
|
// NOTCONSOLE
|
||||||
|
// Not converting to console because this shows how curl can be used
|
||||||
|
|
||||||
|
If the request does not encounter errors, the result will look something like
|
||||||
|
this:
|
||||||
|
|
||||||
|
[source,js]
----
{
  "num_lines_analyzed" : 53,
  "num_messages_analyzed" : 53,
  "sample_start" : "[2018-09-27T14:39:28,518][INFO ][o.e.e.NodeEnvironment ] [node-0] using [1] data paths, mounts [[/ (/dev/disk1)]], net usable_space [165.4gb], net total_space [464.7gb], types [hfs]\n[2018-09-27T14:39:28,521][INFO ][o.e.e.NodeEnvironment ] [node-0] heap size [494.9mb], compressed ordinary object pointers [true]\n",
  "charset" : "UTF-8",
  "has_byte_order_marker" : false,
  "format" : "semi_structured_text",
  "multiline_start_pattern" : "^\\[\\b\\d{4}-\\d{2}-\\d{2}T\\d{2}:\\d{2}:\\d{2},\\d{3}",
  "grok_pattern" : "\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} *\\]\\[%{JAVACLASS:class} *\\] \\[%{HOSTNAME:node}\\] %{JAVALOGMESSAGE:message}", <1>
  "timestamp_field" : "timestamp",
  "joda_timestamp_formats" : [
    "ISO8601"
  ],
  "java_timestamp_formats" : [
    "yyyy-MM-dd'T'HH:mm:ss,SSS"
  ],
  "need_client_timezone" : true,
  "mappings" : {
    "@timestamp" : {
      "type" : "date"
    },
    "class" : {
      "type" : "keyword"
    },
    "loglevel" : {
      "type" : "keyword"
    },
    "message" : {
      "type" : "text"
    },
    "node" : {
      "type" : "keyword"
    }
  },
  "field_stats" : { <2>
    "class" : {
      "count" : 53,
      "cardinality" : 14,
      "top_hits" : [
        {
          "value" : "o.e.p.PluginsService",
          "count" : 26
        },
        {
          "value" : "o.e.c.m.MetaDataIndexTemplateService",
          "count" : 8
        },
        {
          "value" : "o.e.n.Node",
          "count" : 7
        },
        {
          "value" : "o.e.e.NodeEnvironment",
          "count" : 2
        },
        {
          "value" : "o.e.a.ActionModule",
          "count" : 1
        },
        {
          "value" : "o.e.c.s.ClusterApplierService",
          "count" : 1
        },
        {
          "value" : "o.e.c.s.MasterService",
          "count" : 1
        },
        {
          "value" : "o.e.d.DiscoveryModule",
          "count" : 1
        },
        {
          "value" : "o.e.g.GatewayService",
          "count" : 1
        },
        {
          "value" : "o.e.l.LicenseService",
          "count" : 1
        }
      ]
    },
    "loglevel" : {
      "count" : 53,
      "cardinality" : 3,
      "top_hits" : [
        {
          "value" : "INFO",
          "count" : 51
        },
        {
          "value" : "DEBUG",
          "count" : 1
        },
        {
          "value" : "WARN",
          "count" : 1
        }
      ]
    },
    "message" : {
      "count" : 53,
      "cardinality" : 53,
      "top_hits" : [
        {
          "value" : "Using REST wrapper from plugin org.elasticsearch.xpack.security.Security",
          "count" : 1
        },
        {
          "value" : "adding template [.monitoring-alerts] for index patterns [.monitoring-alerts-6]",
          "count" : 1
        },
        {
          "value" : "adding template [.monitoring-beats] for index patterns [.monitoring-beats-6-*]",
          "count" : 1
        },
        {
          "value" : "adding template [.monitoring-es] for index patterns [.monitoring-es-6-*]",
          "count" : 1
        },
        {
          "value" : "adding template [.monitoring-kibana] for index patterns [.monitoring-kibana-6-*]",
          "count" : 1
        },
        {
          "value" : "adding template [.monitoring-logstash] for index patterns [.monitoring-logstash-6-*]",
          "count" : 1
        },
        {
          "value" : "adding template [.triggered_watches] for index patterns [.triggered_watches*]",
          "count" : 1
        },
        {
          "value" : "adding template [.watch-history-9] for index patterns [.watcher-history-9*]",
          "count" : 1
        },
        {
          "value" : "adding template [.watches] for index patterns [.watches*]",
          "count" : 1
        },
        {
          "value" : "starting ...",
          "count" : 1
        }
      ]
    },
    "node" : {
      "count" : 53,
      "cardinality" : 1,
      "top_hits" : [
        {
          "value" : "node-0",
          "count" : 53
        }
      ]
    },
    "timestamp" : {
      "count" : 53,
      "cardinality" : 28,
      "top_hits" : [
        {
          "value" : "2018-09-27T14:39:29,859",
          "count" : 10
        },
        {
          "value" : "2018-09-27T14:39:29,860",
          "count" : 9
        },
        {
          "value" : "2018-09-27T14:39:29,858",
          "count" : 6
        },
        {
          "value" : "2018-09-27T14:39:28,523",
          "count" : 3
        },
        {
          "value" : "2018-09-27T14:39:34,234",
          "count" : 2
        },
        {
          "value" : "2018-09-27T14:39:28,518",
          "count" : 1
        },
        {
          "value" : "2018-09-27T14:39:28,521",
          "count" : 1
        },
        {
          "value" : "2018-09-27T14:39:28,522",
          "count" : 1
        },
        {
          "value" : "2018-09-27T14:39:29,861",
          "count" : 1
        },
        {
          "value" : "2018-09-27T14:39:32,786",
          "count" : 1
        }
      ]
    }
  }
}
----
// NOTCONSOLE

<1> The `grok_pattern` in the output is now the overridden one supplied in the
query parameter.
<2> The returned `field_stats` include entries for the fields from the
overridden `grok_pattern`.

Getting the URL escaping right is hard, so if you are working interactively it
is best to use the {ml} UI!
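If you do need to build such a request programmatically, percent-encoding the Grok pattern mechanically avoids escaping mistakes. Below is a minimal sketch in Java; the class name is illustrative, and note that `URLEncoder` targets form encoding, so its output can differ cosmetically from the URL above (for example `:` becomes `%3A`) while still decoding to the same pattern.

[source,java]
----
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class FindFileStructureUrl {
    public static void main(String[] args) throws Exception {
        String grokPattern = "\\[%{TIMESTAMP_ISO8601:timestamp}\\]\\[%{LOGLEVEL:loglevel} *\\]"
                + "\\[%{JAVACLASS:class} *\\] \\[%{HOSTNAME:node}\\] %{JAVALOGMESSAGE:message}";
        // URLEncoder produces form encoding, where spaces become '+';
        // convert them to %20 so the value is safe in a query string.
        String encoded = URLEncoder.encode(grokPattern, StandardCharsets.UTF_8.name())
                .replace("+", "%20");
        String url = "http://localhost:9200/_xpack/ml/find_file_structure"
                + "?pretty&format=semi_structured_text&grok_pattern=" + encoded;
        System.out.println(url); // paste into the curl command above
    }
}
----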
@@ -1,15 +1,22 @@
 [role="xpack"]
 [[ml-settings]]
-=== Machine Learning Settings in Elasticsearch
+=== Machine learning settings in Elasticsearch
 ++++
-<titleabbrev>Machine Learning Settings</titleabbrev>
+<titleabbrev>Machine learning settings</titleabbrev>
 ++++
 
 You do not need to configure any settings to use {ml}. It is enabled by default.
 
+All of these settings can be added to the `elasticsearch.yml` configuration file.
+The dynamic settings can also be updated across a cluster with the
+<<cluster-update-settings,cluster update settings API>>.
+
+TIP: Dynamic settings take precedence over settings in the `elasticsearch.yml`
+file.
+
 [float]
 [[general-ml-settings]]
-==== General Machine Learning Settings
+==== General machine learning settings
 
 `node.ml`::
 Set to `true` (default) to identify the node as a _machine learning node_. +
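As a concrete illustration of the cluster update settings API mentioned in the added text, here is a minimal sketch using the Java high level REST client. It assumes an already-constructed `RestHighLevelClient` named `client`; the chosen setting value is arbitrary.

[source,java]
----
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.settings.Settings;

import java.io.IOException;

public class UpdateMlSettingExample {
    static void raiseMaxAnomalyRecords(RestHighLevelClient client) throws IOException {
        ClusterUpdateSettingsRequest request = new ClusterUpdateSettingsRequest();
        // xpack.ml.max_anomaly_records is dynamic, so it can be changed at runtime
        request.persistentSettings(Settings.builder()
                .put("xpack.ml.max_anomaly_records", 1000)
                .build());
        ClusterUpdateSettingsResponse response =
                client.cluster().putSettings(request, RequestOptions.DEFAULT);
        assert response.isAcknowledged();
    }
}
----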
@@ -37,12 +44,6 @@ IMPORTANT: If you want to use {ml} features in your cluster, you must have
 `xpack.ml.enabled` set to `true` on all master-eligible nodes. This is the
 default behavior.
 
-`xpack.ml.max_open_jobs`::
-The maximum number of jobs that can run on a node. Defaults to `20`.
-The maximum number of jobs is also constrained by memory usage, so fewer
-jobs than specified by this setting will run on a node if the estimated
-memory use of the jobs would be higher than allowed.
-
 `xpack.ml.max_machine_memory_percent`::
 The maximum percentage of the machine's memory that {ml} may use for running
 analytics processes. (These processes are separate to the {es} JVM.) Defaults to
@@ -57,8 +58,26 @@ that is greater than this setting value, an error occurs. Existing jobs are not
 affected when you update this setting. For more information about the
 `model_memory_limit` property, see <<ml-apilimits>>.
 
+`xpack.ml.max_open_jobs`::
+The maximum number of jobs that can run on a node. Defaults to `20`.
+The maximum number of jobs is also constrained by memory usage, so fewer
+jobs than specified by this setting will run on a node if the estimated
+memory use of the jobs would be higher than allowed.
+
 `xpack.ml.node_concurrent_job_allocations`::
 The maximum number of jobs that can concurrently be in the `opening` state on
 each node. Typically, jobs spend a small amount of time in this state before
 they move to `open` state. Jobs that must restore large models when they are
 opening spend more time in the `opening` state. Defaults to `2`.
+
+[float]
+[[advanced-ml-settings]]
+==== Advanced machine learning settings
+
+These settings are for advanced use cases; the default values are generally
+sufficient:
+
+`xpack.ml.max_anomaly_records`:: (<<cluster-update-settings,Dynamic>>)
+The maximum number of records that are output per bucket. The default value is
+`500`.
+
@@ -1,8 +1,7 @@
 [role="xpack"]
 [testenv="basic"]
-[appendix]
 [[sql-syntax-reserved]]
-= Reserved keywords
+== Reserved keywords
 
 Table with reserved keywords that need to be quoted. Also provide an example to make it more obvious.
 
@@ -275,6 +275,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri
         filters.put("sorani_normalization", SoraniNormalizationFilterFactory::new);
         filters.put("stemmer_override", requiresAnalysisSettings(StemmerOverrideTokenFilterFactory::new));
         filters.put("stemmer", StemmerTokenFilterFactory::new);
+        filters.put("synonym", requiresAnalysisSettings(SynonymTokenFilterFactory::new));
+        filters.put("synonym_graph", requiresAnalysisSettings(SynonymGraphTokenFilterFactory::new));
         filters.put("trim", TrimTokenFilterFactory::new);
         filters.put("truncate", requiresAnalysisSettings(TruncateTokenFilterFactory::new));
         filters.put("unique", UniqueTokenFilterFactory::new);
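With these factories registered, `synonym` and `synonym_graph` become available as token filter types in index analysis settings. A minimal sketch of defining such a filter with the low level Java REST client follows; the index name, filter name, and synonym rules are illustrative, and a connected `RestClient` named `restClient` is assumed.

[source,java]
----
import org.elasticsearch.client.Request;
import org.elasticsearch.client.RestClient;

import java.io.IOException;

public class SynonymIndexExample {
    static void createIndexWithSynonyms(RestClient restClient) throws IOException {
        Request create = new Request("PUT", "/synonym-demo");
        // The synonym filter is registered via requiresAnalysisSettings(...)
        // above because it only makes sense once it has been told its rules.
        create.setJsonEntity("{\n"
            + "  \"settings\": {\n"
            + "    \"analysis\": {\n"
            + "      \"filter\": {\n"
            + "        \"my_synonym\": { \"type\": \"synonym\", \"synonyms\": [\"car,auto\"] }\n"
            + "      },\n"
            + "      \"analyzer\": {\n"
            + "        \"syns\": { \"tokenizer\": \"whitespace\", \"filter\": [\"lowercase\", \"my_synonym\"] }\n"
            + "      }\n"
            + "    }\n"
            + "  }\n"
            + "}");
        restClient.performRequest(create);
    }
}
----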
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.LogManager;
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.LogManager;
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -26,16 +26,18 @@ import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;
 
-import java.io.IOException;
 import java.util.List;
 import java.util.function.Function;
 
 public class SynonymGraphTokenFilterFactory extends SynonymTokenFilterFactory {
 
-    public SynonymGraphTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry,
-                                          String name, Settings settings) throws IOException {
-        super(indexSettings, env, analysisRegistry, name, settings);
+    SynonymGraphTokenFilterFactory(IndexSettings indexSettings, Environment env,
+                                   String name, Settings settings) {
+        super(indexSettings, env, name, settings);
     }
 
     @Override
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.TokenStream;
@@ -26,8 +26,13 @@ import org.apache.lucene.analysis.synonym.SynonymMap;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
+import org.elasticsearch.index.analysis.Analysis;
+import org.elasticsearch.index.analysis.CharFilterFactory;
+import org.elasticsearch.index.analysis.CustomAnalyzer;
+import org.elasticsearch.index.analysis.TokenFilterFactory;
+import org.elasticsearch.index.analysis.TokenizerFactory;
 
-import java.io.IOException;
 import java.io.Reader;
 import java.io.StringReader;
 import java.util.List;
@@ -35,14 +40,14 @@ import java.util.function.Function;
 
 public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
 
-    protected final String format;
-    protected final boolean expand;
-    protected final boolean lenient;
+    private final String format;
+    private final boolean expand;
+    private final boolean lenient;
     protected final Settings settings;
     protected final Environment environment;
 
-    public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry,
-                                     String name, Settings settings) throws IOException {
+    SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env,
+                              String name, Settings settings) {
         super(indexSettings, name, settings);
         this.settings = settings;
 
@@ -83,15 +88,15 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
         };
     }
 
-    protected Analyzer buildSynonymAnalyzer(TokenizerFactory tokenizer, List<CharFilterFactory> charFilters,
-                                            List<TokenFilterFactory> tokenFilters) {
+    Analyzer buildSynonymAnalyzer(TokenizerFactory tokenizer, List<CharFilterFactory> charFilters,
+                                  List<TokenFilterFactory> tokenFilters) {
         return new CustomAnalyzer("synonyms", tokenizer, charFilters.toArray(new CharFilterFactory[0]),
             tokenFilters.stream()
                 .map(TokenFilterFactory::getSynonymFilter)
                 .toArray(TokenFilterFactory[]::new));
     }
 
-    protected SynonymMap buildSynonyms(Analyzer analyzer, Reader rules) {
+    SynonymMap buildSynonyms(Analyzer analyzer, Reader rules) {
         try {
             SynonymMap.Builder parser;
             if ("wordnet".equalsIgnoreCase(format)) {
@@ -107,7 +112,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory {
         }
     }
 
-    protected Reader getRulesFromSettings(Environment env) {
+    Reader getRulesFromSettings(Environment env) {
         Reader rulesReader;
         if (settings.getAsList("synonyms", null) != null) {
             List<String> rulesList = Analysis.getWordList(env, settings, "synonyms");
@@ -24,7 +24,6 @@ import org.apache.lucene.analysis.en.PorterStemFilterFactory;
 import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilterFactory;
 import org.apache.lucene.analysis.reverse.ReverseStringFilterFactory;
 import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
-import org.elasticsearch.index.analysis.SynonymTokenFilterFactory;
 import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase;
 
 import java.util.List;
@@ -106,6 +105,7 @@ public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
         filters.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class);
         filters.put("kstem", KStemTokenFilterFactory.class);
         filters.put("synonym", SynonymTokenFilterFactory.class);
+        filters.put("synonymgraph", SynonymGraphTokenFilterFactory.class);
         filters.put("dictionarycompoundword", DictionaryCompoundWordTokenFilterFactory.class);
         filters.put("hyphenationcompoundword", HyphenationCompoundWordTokenFilterFactory.class);
         filters.put("reversestring", ReverseTokenFilterFactory.class);
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.StopFilter;
@@ -17,7 +17,7 @@
  * under the License.
  */
 
-package org.elasticsearch.index.analysis;
+package org.elasticsearch.analysis.common;
 
 import org.apache.lucene.analysis.CharArraySet;
 import org.apache.lucene.analysis.StopFilter;
@@ -21,9 +21,12 @@ package org.elasticsearch.analysis.common;
 
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.index.IndexSettings;
 import org.elasticsearch.index.query.Operator;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder;
 import org.elasticsearch.test.ESIntegTestCase;
 
@@ -31,11 +34,18 @@ import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 
+import static org.elasticsearch.client.Requests.searchRequest;
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.elasticsearch.index.query.QueryBuilders.boolQuery;
+import static org.elasticsearch.index.query.QueryBuilders.matchPhrasePrefixQuery;
 import static org.elasticsearch.index.query.QueryBuilders.matchPhraseQuery;
 import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.search.builder.SearchSourceBuilder.highlight;
+import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHighlight;
+import static org.hamcrest.Matchers.anyOf;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.startsWith;
 
@@ -153,4 +163,165 @@ public class HighlighterWithAnalyzersTests extends ESIntegTestCase {
                 + "<em>http://www.facebook.com</em> <em>http://elasticsearch.org</em> "
                 + "<em>http://xing.com</em> <em>http://cnn.com</em> http://quora.com"));
     }
 
+    public void testSynonyms() throws IOException {
+        Settings.Builder builder = Settings.builder()
+            .put(indexSettings())
+            .put("index.analysis.analyzer.synonym.tokenizer", "standard")
+            .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
+            .put("index.analysis.filter.synonym.type", "synonym")
+            .putList("index.analysis.filter.synonym.synonyms", "fast,quick");
+
+        assertAcked(prepareCreate("test").setSettings(builder.build())
+            .addMapping("type1", "field1",
+                "type=text,term_vector=with_positions_offsets,search_analyzer=synonym," +
+                    "analyzer=standard,index_options=offsets"));
+        ensureGreen();
+
+        client().prepareIndex("test", "type1", "0").setSource(
+            "field1", "The quick brown fox jumps over the lazy dog").get();
+        refresh();
+        for (String highlighterType : new String[] {"plain", "fvh", "unified"}) {
+            logger.info("--> highlighting (type=" + highlighterType + ") and searching on field1");
+            SearchSourceBuilder source = searchSource()
+                .query(matchQuery("field1", "quick brown fox").operator(Operator.AND))
+                .highlighter(
+                    highlight()
+                        .field("field1")
+                        .order("score")
+                        .preTags("<x>")
+                        .postTags("</x>")
+                        .highlighterType(highlighterType));
+            SearchResponse searchResponse = client().search(searchRequest("test").source(source)).actionGet();
+            assertHighlight(searchResponse, 0, "field1", 0, 1,
+                equalTo("The <x>quick</x> <x>brown</x> <x>fox</x> jumps over the lazy dog"));
+
+            source = searchSource()
+                .query(matchQuery("field1", "fast brown fox").operator(Operator.AND))
+                .highlighter(highlight().field("field1").order("score").preTags("<x>").postTags("</x>"));
+            searchResponse = client().search(searchRequest("test").source(source)).actionGet();
+            assertHighlight(searchResponse, 0, "field1", 0, 1,
+                equalTo("The <x>quick</x> <x>brown</x> <x>fox</x> jumps over the lazy dog"));
+        }
+    }
+
+    public void testPhrasePrefix() throws IOException {
+        Settings.Builder builder = Settings.builder()
+            .put(indexSettings())
+            .put("index.analysis.analyzer.synonym.tokenizer", "standard")
+            .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
+            .put("index.analysis.filter.synonym.type", "synonym")
+            .putList("index.analysis.filter.synonym.synonyms", "quick => fast");
+
+        assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping()));
+
+        ensureGreen();
+
+        client().prepareIndex("first_test_index", "type1", "0").setSource(
+            "field0", "The quick brown fox jumps over the lazy dog",
+            "field1", "The quick brown fox jumps over the lazy dog").get();
+        client().prepareIndex("first_test_index", "type1", "1").setSource("field1",
+            "The quick browse button is a fancy thing, right bro?").get();
+        refresh();
+        logger.info("--> highlighting and searching on field0");
+
+        SearchSourceBuilder source = searchSource()
+            .query(matchPhrasePrefixQuery("field0", "bro"))
+            .highlighter(highlight().field("field0").order("score").preTags("<x>").postTags("</x>"));
+        SearchResponse searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet();
+
+        assertHighlight(searchResponse, 0, "field0", 0, 1, equalTo("The quick <x>brown</x> fox jumps over the lazy dog"));
+
+        source = searchSource()
+            .query(matchPhrasePrefixQuery("field0", "quick bro"))
+            .highlighter(highlight().field("field0").order("score").preTags("<x>").postTags("</x>"));
+
+        searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet();
+        assertHighlight(searchResponse, 0, "field0", 0, 1,
+            equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog"));
+
+        logger.info("--> highlighting and searching on field1");
+        source = searchSource()
+            .query(boolQuery()
+                .should(matchPhrasePrefixQuery("field1", "test"))
+                .should(matchPhrasePrefixQuery("field1", "bro"))
+            )
+            .highlighter(highlight().field("field1").order("score").preTags("<x>").postTags("</x>"));
+
+        searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet();
+        assertThat(searchResponse.getHits().totalHits, equalTo(2L));
+        for (int i = 0; i < 2; i++) {
+            assertHighlight(searchResponse, i, "field1", 0, 1, anyOf(
+                equalTo("The quick <x>browse</x> button is a fancy thing, right <x>bro</x>?"),
+                equalTo("The quick <x>brown</x> fox jumps over the lazy dog")));
+        }
+
+        source = searchSource()
+            .query(matchPhrasePrefixQuery("field1", "quick bro"))
+            .highlighter(highlight().field("field1").order("score").preTags("<x>").postTags("</x>"));
+
+        searchResponse = client().search(searchRequest("first_test_index").source(source)).actionGet();
+
+        assertHighlight(searchResponse, 0, "field1", 0, 1, anyOf(
+            equalTo("The <x>quick</x> <x>browse</x> button is a fancy thing, right bro?"),
+            equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog")));
+        assertHighlight(searchResponse, 1, "field1", 0, 1, anyOf(
+            equalTo("The <x>quick</x> <x>browse</x> button is a fancy thing, right bro?"),
+            equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog")));
+
+        assertAcked(prepareCreate("second_test_index").setSettings(builder.build()).addMapping("doc",
+            "field4", "type=text,term_vector=with_positions_offsets,analyzer=synonym",
+            "field3", "type=text,analyzer=synonym"));
+        // with synonyms
+        client().prepareIndex("second_test_index", "doc", "0").setSource(
+            "type", "type2",
+            "field4", "The quick brown fox jumps over the lazy dog",
+            "field3", "The quick brown fox jumps over the lazy dog").get();
+        client().prepareIndex("second_test_index", "doc", "1").setSource(
+            "type", "type2",
+            "field4", "The quick browse button is a fancy thing, right bro?").get();
+        client().prepareIndex("second_test_index", "doc", "2").setSource(
+            "type", "type2",
+            "field4", "a quick fast blue car").get();
+        refresh();
+
+        source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field3", "fast bro"))
+            .highlighter(highlight().field("field3").order("score").preTags("<x>").postTags("</x>"));
+
+        searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet();
+
+        assertHighlight(searchResponse, 0, "field3", 0, 1,
+            equalTo("The <x>quick</x> <x>brown</x> fox jumps over the lazy dog"));
+
+        logger.info("--> highlighting and searching on field4");
+        source = searchSource().postFilter(termQuery("type", "type2")).query(matchPhrasePrefixQuery("field4", "the fast bro"))
+            .highlighter(highlight().field("field4").order("score").preTags("<x>").postTags("</x>"));
+        searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet();
+
+        assertHighlight(searchResponse, 0, "field4", 0, 1, anyOf(
+            equalTo("<x>The</x> <x>quick</x> <x>browse</x> button is a fancy thing, right bro?"),
+            equalTo("<x>The</x> <x>quick</x> <x>brown</x> fox jumps over the lazy dog")));
+        assertHighlight(searchResponse, 1, "field4", 0, 1, anyOf(
+            equalTo("<x>The</x> <x>quick</x> <x>browse</x> button is a fancy thing, right bro?"),
+            equalTo("<x>The</x> <x>quick</x> <x>brown</x> fox jumps over the lazy dog")));
+
+        logger.info("--> highlighting and searching on field4");
+        source = searchSource().postFilter(termQuery("type", "type2"))
+            .query(matchPhrasePrefixQuery("field4", "a fast quick blue ca"))
+            .highlighter(highlight().field("field4").order("score").preTags("<x>").postTags("</x>"));
+        searchResponse = client().search(searchRequest("second_test_index").source(source)).actionGet();
+
+        assertHighlight(searchResponse, 0, "field4", 0, 1,
+            anyOf(equalTo("<x>a quick fast blue car</x>"),
+                equalTo("<x>a</x> <x>quick</x> <x>fast</x> <x>blue</x> <x>car</x>")));
+    }
+
+    public static XContentBuilder type1TermVectorMapping() throws IOException {
+        return XContentFactory.jsonBuilder().startObject().startObject("type1")
+            .startObject("properties")
+            .startObject("field1").field("type", "text").field("term_vector", "with_positions_offsets").endObject()
+            .startObject("field2").field("type", "text").field("term_vector", "with_positions_offsets").endObject()
+            .endObject()
+            .endObject().endObject();
+    }
 }
@@ -223,6 +223,68 @@
   - match: { tokens.0.token: Foo }
   - match: { tokens.1.token: Bar! }
 
+---
+"synonym":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            analysis:
+              filter:
+                my_synonym:
+                  type: synonym
+                  synonyms: ["car,auto"]
+
+  - do:
+      indices.analyze:
+        index: test
+        body:
+          text: what car magazine
+          tokenizer: whitespace
+          filter: [ my_synonym ]
+  - length: { tokens: 4 }
+  - match: { tokens.0.token: what }
+  - match: { tokens.0.position: 0 }
+  - match: { tokens.1.token: car }
+  - match: { tokens.1.position: 1 }
+  - match: { tokens.2.token: auto }
+  - match: { tokens.2.position: 1 }
+  - match: { tokens.3.token: magazine }
+  - match: { tokens.3.position: 2 }
+
+---
+"synonym_graph":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            analysis:
+              filter:
+                my_graph_synonym:
+                  type: synonym_graph
+                  synonyms: [ "guinea pig,cavy" ]
+
+  - do:
+      indices.analyze:
+        index: test
+        body:
+          text: my guinea pig snores
+          tokenizer: whitespace
+          filter: [ my_graph_synonym ]
+  - length: { tokens: 5 }
+  - match: { tokens.0.token: my }
+  - match: { tokens.1.token: cavy }
+  - match: { tokens.1.position: 1 }
+  - match: { tokens.1.positionLength: 2 }
+  - match: { tokens.2.token: guinea }
+  - match: { tokens.2.position: 1 }
+  - match: { tokens.3.token: pig }
+  - match: { tokens.3.position: 2 }
+  - match: { tokens.4.token: snores }
+  - match: { tokens.4.position: 3 }
+
 ---
 "synonym_graph and flatten_graph":
   - do:
@@ -1,3 +1,4 @@
+---
 "Synonym filter with char_filter":
   # Tests analyze with synonym and char_filter. This is in the analysis-common module
   # because there are no char filters in core.
@@ -30,3 +31,49 @@
   - match: { tokens.2.token: the }
   - match: { tokens.3.token: elasticsearch }
   - match: { tokens.4.token: man! }
+
+---
+"Non-standard position length":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            index:
+              analysis:
+                filter:
+                  syns:
+                    type: synonym
+                    synonyms: [ "wtf,what the fudge" ]
+                analyzer:
+                  custom_syns:
+                    tokenizer: standard
+                    filter: [ lowercase, syns ]
+
+  - do:
+      indices.analyze:
+        index: test
+        body:
+          analyzer: custom_syns
+          text: "say what the fudge dude"
+
+  - length: { tokens: 6 }
+  - match: { tokens.0.token: say }
+  - match: { tokens.0.position: 0 }
+  - match: { tokens.0.positionLength: null }
+  - match: { tokens.1.token: what }
+  - match: { tokens.1.position: 1 }
+  - match: { tokens.1.positionLength: null }
+  - match: { tokens.2.token: wtf }
+  - match: { tokens.2.position: 1 }
+  - match: { tokens.2.positionLength: 3 }
+  - match: { tokens.3.token: the }
+  - match: { tokens.3.position: 2 }
+  - match: { tokens.3.positionLength: null }
+  - match: { tokens.4.token: fudge }
+  - match: { tokens.4.position: 3 }
+  - match: { tokens.4.positionLength: null }
+  - match: { tokens.5.token: dude }
+  - match: { tokens.5.position: 4 }
+  - match: { tokens.5.positionLength: null }
@@ -0,0 +1,82 @@
+---
+"validate query with synonyms":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            index:
+              analysis:
+                filter:
+                  syns:
+                    type: synonym
+                    synonyms: [ "one,two" ]
+                analyzer:
+                  syns:
+                    tokenizer: standard
+                    filter: [ syns ]
+          mappings:
+            test:
+              properties:
+                field:
+                  type: text
+                  analyzer: syns
+
+  - do:
+      indices.validate_query:
+        index: test
+        explain: true
+        body:
+          query:
+            match_phrase_prefix:
+              field:
+                query: foo
+
+  - is_true: valid
+  - length: { explanations: 1 }
+  - match: { explanations.0.explanation: "/field:\"foo\\*\"/" }
+
+  - do:
+      indices.validate_query:
+        index: test
+        explain: true
+        body:
+          query:
+            match_phrase_prefix:
+              field:
+                query: foo bar
+
+  - is_true: valid
+  - length: { explanations: 1 }
+  - match: { explanations.0.explanation: "field:\"foo bar*\"" }
+
+  - do:
+      indices.validate_query:
+        index: test
+        explain: true
+        body:
+          query:
+            match_phrase_prefix:
+              field:
+                query: one bar
+
+  - is_true: valid
+  - length: { explanations: 1 }
+  - match: { explanations.0.explanation: "field:\"(one two) bar*\"" }
+
+  - do:
+      indices.validate_query:
+        index: test
+        explain: true
+        body:
+          query:
+            match_phrase_prefix:
+              field:
+                query: foo one
+
+  - is_true: valid
+  - length: { explanations: 1 }
+  - match: { explanations.0.explanation: "field:\"foo (one* two*)\"" }
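The expected explanations in that test come from the validate query API with `explain` enabled. A minimal sketch of issuing the same kind of request with the low level Java REST client follows; it assumes the `test` index from the test above and a connected `RestClient` named `restClient`.

[source,java]
----
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.io.IOException;

public class ValidateQueryExample {
    static String explainPhrasePrefix(RestClient restClient) throws IOException {
        Request request = new Request("GET", "/test/_validate/query");
        request.addParameter("explain", "true");
        // "one" and "two" are synonyms, so the rewritten query should contain both terms
        request.setJsonEntity(
            "{ \"query\": { \"match_phrase_prefix\": { \"field\": { \"query\": \"one bar\" } } } }");
        Response response = restClient.performRequest(request);
        return EntityUtils.toString(response.getEntity());
    }
}
----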
@@ -0,0 +1,307 @@
+---
+"Test common terms query with stacked tokens":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            analysis:
+              filter:
+                syns:
+                  type: synonym
+                  synonyms: [ "quick,fast" ]
+              analyzer:
+                syns:
+                  tokenizer: standard
+                  filter: [ "syns" ]
+          mappings:
+            test:
+              properties:
+                field1:
+                  type: text
+                  analyzer: syns
+                field2:
+                  type: text
+                  analyzer: syns
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 3
+        body:
+          field1: quick lazy huge brown pidgin
+          field2: the quick lazy huge brown fox jumps over the tree
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        body:
+          field1: the quick brown fox
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 2
+        body:
+          field1: the quick lazy huge brown fox jumps over the tree
+        refresh: true
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the fast brown
+                cutoff_frequency: 3
+                low_freq_operator: or
+  - match: { hits.total: 3 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.1._id: "2" }
+  - match: { hits.hits.2._id: "3" }
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the fast brown
+                cutoff_frequency: 3
+                low_freq_operator: and
+  - match: { hits.total: 2 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.1._id: "2" }
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the fast brown
+                cutoff_frequency: 3
+  - match: { hits.total: 3 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.1._id: "2" }
+  - match: { hits.hits.2._id: "3" }
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the fast huge fox
+                minimum_should_match:
+                  low_freq: 3
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "2" }
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the fast lazy fox brown
+                cutoff_frequency: 1
+                minimum_should_match:
+                  high_freq: 5
+  - match: { hits.total: 2 }
+  - match: { hits.hits.0._id: "2" }
+  - match: { hits.hits.1._id: "1" }
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the fast lazy fox brown
+                cutoff_frequency: 1
+                minimum_should_match:
+                  high_freq: 6
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "2" }
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the fast lazy fox brown
+                cutoff_frequency: 1
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "2" }
+
+  - do:
+      search:
+        body:
+          query:
+            common:
+              field1:
+                query: the quick brown
+                cutoff_frequency: 3
+  - match: { hits.total: 3 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.1._id: "2" }
+  - match: { hits.hits.2._id: "3" }
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              field1:
+                query: the fast brown
+                cutoff_frequency: 3
+                operator: and
+  - match: { hits.total: 2 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.1._id: "2" }
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              field1:
+                query: the fast brown
+                cutoff_frequency: 3
+                operator: or
+  - match: { hits.total: 3 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.1._id: "2" }
+  - match: { hits.hits.2._id: "3" }
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              field1:
+                query: the fast brown
+                cutoff_frequency: 3
+                minimum_should_match: 3
+  - match: { hits.total: 2 }
+  - match: { hits.hits.0._id: "1" }
+  - match: { hits.hits.1._id: "2" }
+
+  - do:
+      search:
+        body:
+          query:
+            multi_match:
+              query: the fast brown
+              fields: [ "field1", "field2" ]
+              cutoff_frequency: 3
+              operator: and
+  - match: { hits.total: 3 }
+  - match: { hits.hits.0._id: "3" }
+  - match: { hits.hits.1._id: "1" }
+  - match: { hits.hits.2._id: "2" }
+
+---
+"Test match query with synonyms - see #3881 for extensive description of the issue":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            analysis:
+              filter:
+                synonym:
+                  type: synonym
+                  synonyms: [ "quick,fast" ]
+              analyzer:
+                index:
+                  type: custom
+                  tokenizer: standard
+                  filter: lowercase
+                search:
+                  type: custom
+                  tokenizer: standard
+                  filter: [ lowercase, synonym ]
+          mappings:
+            test:
+              properties:
+                text:
+                  type: text
+                  analyzer: index
+                  search_analyzer: search
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        body:
+          text: quick brown fox
+        refresh: true
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: quick
+                operator: and
+  - match: { hits.total: 1 }
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: quick brown
+                operator: and
+  - match: { hits.total: 1 }
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: fast
+                operator: and
+  - match: { hits.total: 1 }
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 2
+        body:
+          text: fast brown fox
+        refresh: true
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: quick
+                operator: and
+  - match: { hits.total: 2 }
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: quick brown
+                operator: and
+  - match: { hits.total: 2 }
@@ -0,0 +1,205 @@
+setup:
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            analysis:
+              filter:
+                syns:
+                  type: synonym
+                  synonyms: [ "wtf, what the fudge", "foo, bar baz" ]
+                graph_syns:
+                  type: synonym_graph
+                  synonyms: [ "wtf, what the fudge", "foo, bar baz" ]
+              analyzer:
+                lower_syns:
+                  type: custom
+                  tokenizer: standard
+                  filter: [ lowercase, syns ]
+                lower_graph_syns:
+                  type: custom
+                  tokenizer: standard
+                  filter: [ lowercase, graph_syns ]
+          mappings:
+            test:
+              properties:
+                field:
+                  type: text
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        body:
+          text: say wtf happened foo
+  - do:
+      index:
+        index: test
+        type: test
+        id: 2
+        body:
+          text: bar baz what the fudge man
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 3
+        body:
+          text: wtf
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 4
+        body:
+          text: what is the name for fudge
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 5
+        body:
+          text: bar two three
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 6
+        body:
+          text: bar baz two three
+        refresh: true
+
+---
+"simple multiterm phrase":
+  - do:
+      search:
+        body:
+          query:
+            match_phrase:
+              text:
+                query: foo two three
+                analyzer: lower_syns
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "5" } # incorrect match because we're not using graph synonyms
+
+  - do:
+      search:
+        body:
+          query:
+            match_phrase:
+              text:
+                query: foo two three
+                analyzer: lower_graph_syns
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "6" } # correct match because we're using graph synonyms
+
+---
+"simple multiterm and":
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: say what the fudge
+                analyzer: lower_syns
+                operator: and
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "1" } # non-graph synonyms coincidentally give us the correct answer here
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: say what the fudge
+                analyzer: lower_graph_syns
+                operator: and
+  - match: { hits.total: 1 }
+  - match: { hits.hits.0._id: "1" }
+
+---
+"minimum should match":
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: three what the fudge foo
+                operator: or
+                analyzer: lower_graph_syns
+                auto_generate_synonyms_phrase_query: false
+  - match: { hits.total: 6 }
+
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: three what the fudge foo
+                operator: or
+                analyzer: lower_graph_syns
+                minimum_should_match: 80%
+  - match: { hits.total: 3 }
+  - match: { hits.hits.0._id: "2" }
+  - match: { hits.hits.1._id: "6" }
+  - match: { hits.hits.2._id: "1" }
+
+---
+"multiterm synonyms phrase":
+  - do:
+      search:
+        body:
+          query:
+            match:
+              text:
+                query: wtf
+                operator: and
+                analyzer: lower_graph_syns
+  - match: { hits.total: 3 }
+  - match: { hits.hits.0._id: "2" }
+  - match: { hits.hits.1._id: "3" }
+  - match: { hits.hits.2._id: "1" }
+
+---
+"phrase prefix":
+  - do:
+      index:
+        index: test
+        type: test
+        id: 7
+        body:
+          text: "WTFD!"
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 8
+        body:
+          text: "Weird Al's WHAT THE FUDGESICLE"
+        refresh: true
+
+  - do:
+      search:
+        body:
+          query:
+            match_phrase_prefix:
+              text:
+                query: wtf
+                analyzer: lower_graph_syns
+  - match: { hits.total: 5 }
+  - match: { hits.hits.0._id: "3" }
+  - match: { hits.hits.1._id: "7" }
+  - match: { hits.hits.2._id: "1" }
+  - match: { hits.hits.3._id: "8" }
+  - match: { hits.hits.4._id: "2" }
@@ -0,0 +1,44 @@
+---
+"suggestions with synonyms":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            analysis:
+              analyzer:
+                suggest_analyzer_synonyms:
+                  type: custom
+                  tokenizer: standard
+                  filter: [ lowercase, my_synonyms ]
+              filter:
+                my_synonyms:
+                  type: synonym
+                  synonyms: [ "foo,renamed"]
+          mappings:
+            test:
+              properties:
+                field:
+                  type: completion
+                  analyzer: suggest_analyzer_synonyms
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        body:
+          field:
+            input: [ "Foo Fighters" ]
+        refresh: true
+
+  - do:
+      search:
+        index: test
+        body:
+          suggest:
+            text: r
+            test:
+              completion:
+                field: field
+  - match: {suggest.test.0.options.0.text: Foo Fighters}
@@ -19,10 +19,6 @@
 
 package org.elasticsearch.ingest.common;
 
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.RandomDocumentPicks;
@@ -33,6 +29,10 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.test.ESTestCase;
 
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
 import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.core.Is.is;
 
@@ -52,7 +52,8 @@ public class ScriptProcessorTests extends ESTestCase {
                     ctx.put("bytes_total", randomBytesTotal);
                     return null;
                 }
-            )
+            ),
+            Collections.emptyMap()
         )
     ),
     new HashMap<>(ScriptModule.CORE_CONTEXTS)
@@ -38,6 +38,7 @@ import org.objectweb.asm.commons.GeneratorAdapter;
 
 import java.lang.invoke.MethodType;
 import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.security.AccessControlContext;
 import java.security.AccessController;
@@ -127,25 +128,49 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
         Compiler compiler = contextsToCompilers.get(context);
 
         if (context.instanceClazz.equals(SearchScript.class)) {
-            GenericElasticsearchScript painlessScript =
-                (GenericElasticsearchScript)compile(compiler, scriptName, scriptSource, params);
+            Constructor<?> constructor = compile(compiler, scriptName, scriptSource, params);
+            boolean needsScore;
+
+            try {
+                GenericElasticsearchScript newInstance = (GenericElasticsearchScript)constructor.newInstance();
+                needsScore = newInstance.needs_score();
+            } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+                throw new IllegalArgumentException("internal error");
+            }
 
             SearchScript.Factory factory = (p, lookup) -> new SearchScript.LeafFactory() {
                 @Override
                 public SearchScript newInstance(final LeafReaderContext context) {
-                    return new ScriptImpl(painlessScript, p, lookup, context);
+                    try {
+                        // a new instance is required for the class bindings model to work correctly
+                        GenericElasticsearchScript newInstance = (GenericElasticsearchScript)constructor.newInstance();
+                        return new ScriptImpl(newInstance, p, lookup, context);
+                    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+                        throw new IllegalArgumentException("internal error");
+                    }
                 }
                 @Override
                 public boolean needs_score() {
-                    return painlessScript.needs_score();
+                    return needsScore;
                 }
             };
             return context.factoryClazz.cast(factory);
         } else if (context.instanceClazz.equals(ExecutableScript.class)) {
-            GenericElasticsearchScript painlessScript =
-                (GenericElasticsearchScript)compile(compiler, scriptName, scriptSource, params);
+            Constructor<?> constructor = compile(compiler, scriptName, scriptSource, params);
 
-            ExecutableScript.Factory factory = (p) -> new ScriptImpl(painlessScript, p, null, null);
+            ExecutableScript.Factory factory = new ExecutableScript.Factory() {
+                @Override
+                public ExecutableScript newInstance(Map<String, Object> params) {
+                    try {
+                        // a new instance is required for the class bindings model to work correctly
+                        GenericElasticsearchScript newInstance = (GenericElasticsearchScript)constructor.newInstance();
+                        return new ScriptImpl(newInstance, params, null, null);
+                    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+                        throw new IllegalArgumentException("internal error");
+                    }
+                }
+            };
+
             return context.factoryClazz.cast(factory);
         } else {
             // Check we ourselves are not being called by unprivileged code.
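
The hunk above swaps a single shared script object for a captured Constructor<?> that is invoked once per newInstance() call. The following is a minimal, self-contained sketch of that pattern under assumed names (PerCallFactory is invented for illustration; it is not the Painless API):

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;

// Sketch: hold the compiled class's constructor and reflectively build a fresh
// object per request, so instance-level state (e.g. class bindings) is never
// shared between concurrent users of one compiled script.
final class PerCallFactory<T> {
    private final Constructor<? extends T> constructor;

    PerCallFactory(Constructor<? extends T> constructor) {
        this.constructor = constructor;
    }

    T newInstance() {
        try {
            return constructor.newInstance();
        } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
            throw new IllegalArgumentException("internal error", e);
        }
    }
}
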
@@ -367,7 +392,7 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
         }
     }
 
-    Object compile(Compiler compiler, String scriptName, String source, Map<String, String> params, Object... args) {
+    Constructor<?> compile(Compiler compiler, String scriptName, String source, Map<String, String> params) {
         final CompilerSettings compilerSettings = buildCompilerSettings(params);
 
         // Check we ourselves are not being called by unprivileged code.
@@ -383,14 +408,14 @@ public final class PainlessScriptEngine extends AbstractComponent implements Scr
 
         try {
             // Drop all permissions to actually compile the code itself.
-            return AccessController.doPrivileged(new PrivilegedAction<Object>() {
+            return AccessController.doPrivileged(new PrivilegedAction<Constructor<?>>() {
                 @Override
-                public Object run() {
+                public Constructor<?> run() {
                     String name = scriptName == null ? source : scriptName;
                     Constructor<?> constructor = compiler.compile(loader, new MainMethodReserved(), name, source, compilerSettings);
 
                     try {
-                        return constructor.newInstance(args);
+                        return constructor;
                     } catch (Exception exception) { // Catch everything to let the user know this is something caused internally.
                         throw new IllegalStateException(
                             "An internal error occurred attempting to define the script [" + name + "].", exception);

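For context, compile(...) now hands the Constructor<?> back to the caller instead of eagerly instantiating it, while still running the compilation step inside a PrivilegedAction. A hedged sketch of that shape, with a constructor lookup standing in for the real compiler.compile(...) call (SandboxedLookup and lookupConstructor are illustrative names):

import java.lang.reflect.Constructor;
import java.security.AccessController;
import java.security.PrivilegedAction;

final class SandboxedLookup {
    // Resolve the constructor inside a PrivilegedAction so the engine controls
    // the access-control context in force while the class is defined, and
    // return the constructor uninvoked for callers to instantiate later.
    static Constructor<?> lookupConstructor(final Class<?> scriptClass) {
        return AccessController.doPrivileged(new PrivilegedAction<Constructor<?>>() {
            @Override
            public Constructor<?> run() {
                try {
                    return scriptClass.getConstructor();
                } catch (NoSuchMethodException e) {
                    throw new IllegalStateException("An internal error occurred attempting to define the script.", e);
                }
            }
        });
    }
}
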
@@ -68,28 +68,29 @@ public class BaseClassTests extends ScriptTestCase {
         }
     }
 
-    public void testGets() {
+    public void testGets() throws Exception {
         Compiler compiler = new Compiler(Gets.class, null, null, painlessLookup);
         Map<String, Object> map = new HashMap<>();
         map.put("s", 1);
 
-        assertEquals(1, ((Gets)scriptEngine.compile(compiler, null, "testInt", emptyMap(), "s", -1, null)).execute());
-        assertEquals(Collections.emptyMap(), ((Gets)scriptEngine.compile(compiler, null, "testMap", emptyMap(), "s", -1, null)).execute());
-        assertEquals(Collections.singletonMap("1", "1"),
-            ((Gets)scriptEngine.compile(compiler, null, "testMap", emptyMap(), "s", -1, Collections.singletonMap("1", "1"))).execute());
-        assertEquals("s", ((Gets)scriptEngine.compile(compiler, null, "testString", emptyMap(), "s", -1, null)).execute());
-        assertEquals(map,
-            ((Gets)scriptEngine.compile(compiler, null, "testMap.put(testString, testInt); testMap", emptyMap(), "s", -1, null)).execute());
+        assertEquals(1, ((Gets)scriptEngine.compile(compiler, null, "testInt", emptyMap()).newInstance("s", -1, null)).execute());
+        assertEquals(Collections.emptyMap(),
+            ((Gets)scriptEngine.compile(compiler, null, "testMap", emptyMap()).newInstance("s", -1, null)).execute());
+        assertEquals(Collections.singletonMap("1", "1"), ((Gets)scriptEngine.compile(
+            compiler, null, "testMap", emptyMap()).newInstance("s", -1, Collections.singletonMap("1", "1"))).execute());
+        assertEquals("s", ((Gets)scriptEngine.compile(compiler, null, "testString", emptyMap()).newInstance("s", -1, null)).execute());
+        assertEquals(map, ((Gets)scriptEngine.compile(
+            compiler, null, "testMap.put(testString, testInt); testMap", emptyMap()).newInstance("s", -1, null)).execute());
     }
 
     public abstract static class NoArgs {
         public static final String[] PARAMETERS = new String[] {};
         public abstract Object execute();
     }
-    public void testNoArgs() {
+    public void testNoArgs() throws Exception {
         Compiler compiler = new Compiler(NoArgs.class, null, null, painlessLookup);
-        assertEquals(1, ((NoArgs)scriptEngine.compile(compiler, null, "1", emptyMap())).execute());
-        assertEquals("foo", ((NoArgs)scriptEngine.compile(compiler, null, "'foo'", emptyMap())).execute());
+        assertEquals(1, ((NoArgs)scriptEngine.compile(compiler, null, "1", emptyMap()).newInstance()).execute());
+        assertEquals("foo", ((NoArgs)scriptEngine.compile(compiler, null, "'foo'", emptyMap()).newInstance()).execute());
 
         Exception e = expectScriptThrows(IllegalArgumentException.class, () ->
             scriptEngine.compile(compiler, null, "doc", emptyMap()));

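The tests now follow a compile-then-instantiate shape: compile(...) yields a Constructor<?>, and the trailing arguments formerly passed to compile become constructor arguments for newInstance(...). A hypothetical, runnable miniature of that shape (ScriptBase and ConstantScript are invented for illustration):

import java.lang.reflect.Constructor;

public final class CompileThenInstantiate {
    public abstract static class ScriptBase {
        public abstract Object execute();
    }

    public static final class ConstantScript extends ScriptBase {
        private final int constant;
        public ConstantScript(int constant) { this.constant = constant; }
        @Override public Object execute() { return constant; }
    }

    public static void main(String[] args) throws Exception {
        // Stand-in for scriptEngine.compile(...): return the constructor, not an instance.
        Constructor<ConstantScript> compiled = ConstantScript.class.getConstructor(int.class);
        // Each test (or caller) instantiates its own copy via newInstance(...).
        ScriptBase script = compiled.newInstance(1);
        System.out.println(script.execute()); // prints 1
    }
}
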
@@ -110,12 +111,12 @@ public class BaseClassTests extends ScriptTestCase {
         public static final String[] PARAMETERS = new String[] {"arg"};
         public abstract Object execute(Object arg);
     }
-    public void testOneArg() {
+    public void testOneArg() throws Exception {
         Compiler compiler = new Compiler(OneArg.class, null, null, painlessLookup);
         Object rando = randomInt();
-        assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando));
+        assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap()).newInstance()).execute(rando));
         rando = randomAlphaOfLength(5);
-        assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando));
+        assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap()).newInstance()).execute(rando));
 
         Compiler noargs = new Compiler(NoArgs.class, null, null, painlessLookup);
         Exception e = expectScriptThrows(IllegalArgumentException.class, () ->
@@ -131,34 +132,38 @@ public class BaseClassTests extends ScriptTestCase {
         public static final String[] PARAMETERS = new String[] {"arg"};
         public abstract Object execute(String[] arg);
     }
-    public void testArrayArg() {
+    public void testArrayArg() throws Exception {
         Compiler compiler = new Compiler(ArrayArg.class, null, null, painlessLookup);
         String rando = randomAlphaOfLength(5);
-        assertEquals(rando, ((ArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new String[] {rando, "foo"}));
+        assertEquals(rando,
+            ((ArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new String[] {rando, "foo"}));
     }
 
     public abstract static class PrimitiveArrayArg {
         public static final String[] PARAMETERS = new String[] {"arg"};
         public abstract Object execute(int[] arg);
     }
-    public void testPrimitiveArrayArg() {
+    public void testPrimitiveArrayArg() throws Exception {
         Compiler compiler = new Compiler(PrimitiveArrayArg.class, null, null, painlessLookup);
         int rando = randomInt();
-        assertEquals(rando, ((PrimitiveArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new int[] {rando, 10}));
+        assertEquals(rando, ((PrimitiveArrayArg)scriptEngine.compile(
+            compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new int[] {rando, 10}));
     }
 
     public abstract static class DefArrayArg {
         public static final String[] PARAMETERS = new String[] {"arg"};
         public abstract Object execute(Object[] arg);
     }
-    public void testDefArrayArg() {
+    public void testDefArrayArg() throws Exception {
         Compiler compiler = new Compiler(DefArrayArg.class, null, null, painlessLookup);
         Object rando = randomInt();
-        assertEquals(rando, ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new Object[] {rando, 10}));
+        assertEquals(rando,
+            ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new Object[] {rando, 10}));
         rando = randomAlphaOfLength(5);
-        assertEquals(rando, ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new Object[] {rando, 10}));
-        assertEquals(5,
-            ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0].length()", emptyMap())).execute(new Object[] {rando, 10}));
+        assertEquals(rando,
+            ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap()).newInstance()).execute(new Object[] {rando, 10}));
+        assertEquals(5, ((DefArrayArg)scriptEngine.compile(
+            compiler, null, "arg[0].length()", emptyMap()).newInstance()).execute(new Object[] {rando, 10}));
     }
 
     public abstract static class ManyArgs {
@@ -169,24 +174,24 @@ public class BaseClassTests extends ScriptTestCase {
         public abstract boolean needsC();
         public abstract boolean needsD();
     }
-    public void testManyArgs() {
+    public void testManyArgs() throws Exception {
         Compiler compiler = new Compiler(ManyArgs.class, null, null, painlessLookup);
         int rando = randomInt();
-        assertEquals(rando, ((ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0));
-        assertEquals(10, ((ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).execute(1, 2, 3, 4));
+        assertEquals(rando, ((ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance()).execute(rando, 0, 0, 0));
+        assertEquals(10, ((ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance()).execute(1, 2, 3, 4));
 
         // While we're here we can verify that painless correctly finds used variables
-        ManyArgs script = (ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap());
+        ManyArgs script = (ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance();
         assertTrue(script.needsA());
         assertFalse(script.needsB());
         assertFalse(script.needsC());
         assertFalse(script.needsD());
-        script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c", emptyMap());
+        script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c", emptyMap()).newInstance();
         assertTrue(script.needsA());
         assertTrue(script.needsB());
         assertTrue(script.needsC());
         assertFalse(script.needsD());
-        script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap());
+        script = (ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance();
         assertTrue(script.needsA());
         assertTrue(script.needsB());
         assertTrue(script.needsC());
@@ -197,10 +202,11 @@ public class BaseClassTests extends ScriptTestCase {
         public static final String[] PARAMETERS = new String[] {"arg"};
         public abstract Object execute(String... arg);
     }
-    public void testVararg() {
+    public void testVararg() throws Exception {
         Compiler compiler = new Compiler(VarargTest.class, null, null, painlessLookup);
-        assertEquals("foo bar baz", ((VarargTest)scriptEngine.compile(compiler, null, "String.join(' ', Arrays.asList(arg))", emptyMap()))
-            .execute("foo", "bar", "baz"));
+        assertEquals("foo bar baz",
+            ((VarargTest)scriptEngine.compile(compiler, null, "String.join(' ', Arrays.asList(arg))", emptyMap()).newInstance())
+                .execute("foo", "bar", "baz"));
     }
 
     public abstract static class DefaultMethods {
@@ -213,26 +219,29 @@ public class BaseClassTests extends ScriptTestCase {
             return execute(a, b, c, 1);
         }
     }
-    public void testDefaultMethods() {
+    public void testDefaultMethods() throws Exception {
         Compiler compiler = new Compiler(DefaultMethods.class, null, null, painlessLookup);
         int rando = randomInt();
-        assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0));
-        assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).executeWithASingleOne(rando, 0, 0));
-        assertEquals(10, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).execute(1, 2, 3, 4));
-        assertEquals(4, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).executeWithOne());
-        assertEquals(7, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).executeWithASingleOne(1, 2, 3));
+        assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance()).execute(rando, 0, 0, 0));
+        assertEquals(rando,
+            ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap()).newInstance()).executeWithASingleOne(rando, 0, 0));
+        assertEquals(10,
+            ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance()).execute(1, 2, 3, 4));
+        assertEquals(4, ((DefaultMethods)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap()).newInstance()).executeWithOne());
+        assertEquals(7, ((DefaultMethods)scriptEngine.compile(
+            compiler, null, "a + b + c + d", emptyMap()).newInstance()).executeWithASingleOne(1, 2, 3));
     }
 
     public abstract static class ReturnsVoid {
         public static final String[] PARAMETERS = new String[] {"map"};
         public abstract void execute(Map<String, Object> map);
     }
-    public void testReturnsVoid() {
+    public void testReturnsVoid() throws Exception {
         Compiler compiler = new Compiler(ReturnsVoid.class, null, null, painlessLookup);
         Map<String, Object> map = new HashMap<>();
-        ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.a = 'foo'", emptyMap())).execute(map);
+        ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.a = 'foo'", emptyMap()).newInstance()).execute(map);
         assertEquals(singletonMap("a", "foo"), map);
-        ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.remove('a')", emptyMap())).execute(map);
+        ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.remove('a')", emptyMap()).newInstance()).execute(map);
         assertEquals(emptyMap(), map);
 
         String debug = Debugger.toString(ReturnsVoid.class, "int i = 0", new CompilerSettings());
@@ -246,19 +255,23 @@ public class BaseClassTests extends ScriptTestCase {
         public static final String[] PARAMETERS = new String[] {};
         public abstract boolean execute();
     }
-    public void testReturnsPrimitiveBoolean() {
+    public void testReturnsPrimitiveBoolean() throws Exception {
         Compiler compiler = new Compiler(ReturnsPrimitiveBoolean.class, null, null, painlessLookup);
 
-        assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true", emptyMap())).execute());
-        assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "false", emptyMap())).execute());
-        assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.TRUE", emptyMap())).execute());
-        assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.FALSE", emptyMap())).execute());
+        assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true", emptyMap()).newInstance()).execute());
+        assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "false", emptyMap()).newInstance()).execute());
+        assertEquals(true,
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.TRUE", emptyMap()).newInstance()).execute());
+        assertEquals(false,
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "Boolean.FALSE", emptyMap()).newInstance()).execute());
 
-        assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = true; i", emptyMap())).execute());
-        assertEquals(true,
-            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = Boolean.TRUE; i", emptyMap())).execute());
+        assertEquals(true,
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = true; i", emptyMap()).newInstance()).execute());
+        assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(
+            compiler, null, "def i = Boolean.TRUE; i", emptyMap()).newInstance()).execute());
 
-        assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true || false", emptyMap())).execute());
+        assertEquals(true,
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true || false", emptyMap()).newInstance()).execute());
 
         String debug = Debugger.toString(ReturnsPrimitiveBoolean.class, "false", new CompilerSettings());
         assertThat(debug, containsString("ICONST_0"));
@@ -266,41 +279,44 @@ public class BaseClassTests extends ScriptTestCase {
         assertThat(debug, containsString("IRETURN"));
 
         Exception e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1L", emptyMap())).execute());
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1L", emptyMap()).newInstance()).execute());
         assertEquals("Cannot cast from [long] to [boolean].", e.getMessage());
         e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute());
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute());
         assertEquals("Cannot cast from [float] to [boolean].", e.getMessage());
         e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute());
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute());
         assertEquals("Cannot cast from [double] to [boolean].", e.getMessage());
         expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap())).execute());
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap()).newInstance()).execute());
         expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap())).execute());
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap()).newInstance()).execute());
         expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap())).execute());
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap()).newInstance()).execute());
 
-        assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute());
+        assertEquals(false,
+            ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute());
     }
 
     public abstract static class ReturnsPrimitiveInt {
         public static final String[] PARAMETERS = new String[] {};
         public abstract int execute();
     }
-    public void testReturnsPrimitiveInt() {
+    public void testReturnsPrimitiveInt() throws Exception {
         Compiler compiler = new Compiler(ReturnsPrimitiveInt.class, null, null, painlessLookup);
 
-        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1", emptyMap())).execute());
-        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1L", emptyMap())).execute());
-        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1d", emptyMap())).execute());
-        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1f", emptyMap())).execute());
-        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "Integer.valueOf(1)", emptyMap())).execute());
+        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1", emptyMap()).newInstance()).execute());
+        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1L", emptyMap()).newInstance()).execute());
+        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1d", emptyMap()).newInstance()).execute());
+        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1.1f", emptyMap()).newInstance()).execute());
+        assertEquals(1,
+            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "Integer.valueOf(1)", emptyMap()).newInstance()).execute());
 
-        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1; i", emptyMap())).execute());
-        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = Integer.valueOf(1); i", emptyMap())).execute());
+        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1; i", emptyMap()).newInstance()).execute());
+        assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(
+            compiler, null, "def i = Integer.valueOf(1); i", emptyMap()).newInstance()).execute());
 
-        assertEquals(2, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1 + 1", emptyMap())).execute());
+        assertEquals(2, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1 + 1", emptyMap()).newInstance()).execute());
 
         String debug = Debugger.toString(ReturnsPrimitiveInt.class, "1", new CompilerSettings());
         assertThat(debug, containsString("ICONST_1"));
@@ -308,88 +324,99 @@ public class BaseClassTests extends ScriptTestCase {
         assertThat(debug, containsString("IRETURN"));
 
         Exception e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1L", emptyMap())).execute());
+            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1L", emptyMap()).newInstance()).execute());
         assertEquals("Cannot cast from [long] to [int].", e.getMessage());
         e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute());
+            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute());
         assertEquals("Cannot cast from [float] to [int].", e.getMessage());
         e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute());
+            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute());
         assertEquals("Cannot cast from [double] to [int].", e.getMessage());
         expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap())).execute());
+            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1L; i", emptyMap()).newInstance()).execute());
         expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap())).execute());
+            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1f; i", emptyMap()).newInstance()).execute());
         expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap())).execute());
+            ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "def i = 1.1d; i", emptyMap()).newInstance()).execute());
 
-        assertEquals(0, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute());
+        assertEquals(0, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute());
     }
 
     public abstract static class ReturnsPrimitiveFloat {
         public static final String[] PARAMETERS = new String[] {};
         public abstract float execute();
     }
-    public void testReturnsPrimitiveFloat() {
+    public void testReturnsPrimitiveFloat() throws Exception {
         Compiler compiler = new Compiler(ReturnsPrimitiveFloat.class, null, null, painlessLookup);
 
-        assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute(), 0);
-        assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "(float) 1.1d", emptyMap())).execute(), 0);
-        assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap())).execute(), 0);
-        assertEquals(1.1f,
-            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap())).execute(), 0);
+        assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.1f,
+            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "(float) 1.1d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.1f,
+            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(
+            compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap()).newInstance()).execute(), 0);
 
-        assertEquals(1.1f + 6.7f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f + 6.7f", emptyMap())).execute(), 0);
+        assertEquals(1.1f + 6.7f,
+            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f + 6.7f", emptyMap()).newInstance()).execute(), 0);
 
         Exception e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute());
+            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute());
         assertEquals("Cannot cast from [double] to [float].", e.getMessage());
         e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap())).execute());
-        e = expectScriptThrows(ClassCastException.class, () ->
-            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = Double.valueOf(1.1); d", emptyMap())).execute());
+            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap()).newInstance()).execute());
+        e = expectScriptThrows(ClassCastException.class, () -> ((ReturnsPrimitiveFloat)scriptEngine.compile(
+            compiler, null, "def d = Double.valueOf(1.1); d", emptyMap()).newInstance()).execute());
 
         String debug = Debugger.toString(ReturnsPrimitiveFloat.class, "1f", new CompilerSettings());
         assertThat(debug, containsString("FCONST_1"));
         // The important thing here is that we have the bytecode for returning a float instead of an object
         assertThat(debug, containsString("FRETURN"));
 
-        assertEquals(0.0f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute(), 0);
+        assertEquals(0.0f,
+            ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute(), 0);
     }
 
     public abstract static class ReturnsPrimitiveDouble {
         public static final String[] PARAMETERS = new String[] {};
         public abstract double execute();
     }
-    public void testReturnsPrimitiveDouble() {
+    public void testReturnsPrimitiveDouble() throws Exception {
         Compiler compiler = new Compiler(ReturnsPrimitiveDouble.class, null, null, painlessLookup);
 
-        assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1", emptyMap())).execute(), 0);
-        assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1L", emptyMap())).execute(), 0);
-        assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1d", emptyMap())).execute(), 0);
-        assertEquals((double) 1.1f, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute(), 0);
-        assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "Double.valueOf(1.1)", emptyMap())).execute(), 0);
-        assertEquals((double) 1.1f,
-            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "Float.valueOf(1.1f)", emptyMap())).execute(), 0);
+        assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1L", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals((double) 1.1f,
+            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1f", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(
+            compiler, null, "Double.valueOf(1.1)", emptyMap()).newInstance()).execute(), 0);
+        assertEquals((double) 1.1f, ((ReturnsPrimitiveDouble)scriptEngine.compile(
+            compiler, null, "Float.valueOf(1.1f)", emptyMap()).newInstance()).execute(), 0);
 
-        assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1; d", emptyMap())).execute(), 0);
-        assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1L; d", emptyMap())).execute(), 0);
-        assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap())).execute(), 0);
-        assertEquals((double) 1.1f,
-            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap())).execute(), 0);
-        assertEquals(1.1,
-            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = Double.valueOf(1.1); d", emptyMap())).execute(), 0);
-        assertEquals((double) 1.1f,
-            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap())).execute(), 0);
+        assertEquals(1.0,
+            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1; d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.0,
+            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1L; d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.1,
+            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1d; d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals((double) 1.1f,
+            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "def d = 1.1f; d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals(1.1, ((ReturnsPrimitiveDouble)scriptEngine.compile(
+            compiler, null, "def d = Double.valueOf(1.1); d", emptyMap()).newInstance()).execute(), 0);
+        assertEquals((double) 1.1f, ((ReturnsPrimitiveDouble)scriptEngine.compile(
+            compiler, null, "def d = Float.valueOf(1.1f); d", emptyMap()).newInstance()).execute(), 0);
 
-        assertEquals(1.1 + 6.7, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1 + 6.7", emptyMap())).execute(), 0);
+        assertEquals(1.1 + 6.7,
+            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1.1 + 6.7", emptyMap()).newInstance()).execute(), 0);
 
         String debug = Debugger.toString(ReturnsPrimitiveDouble.class, "1", new CompilerSettings());
         assertThat(debug, containsString("DCONST_1"));
         // The important thing here is that we have the bytecode for returning a double instead of an object
         assertThat(debug, containsString("DRETURN"));
 
-        assertEquals(0.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "int i = 0", emptyMap())).execute(), 0);
+        assertEquals(0.0,
+            ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "int i = 0", emptyMap()).newInstance()).execute(), 0);
     }
 
     public abstract static class NoArgumentsConstant {

@@ -82,20 +82,15 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
     @Before
     public void setup() {
        Settings settings = Settings.builder()
-                .put("index.analysis.filter.mySynonyms.type", "synonym")
-                .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto"))
-                .put("index.analysis.analyzer.synonym.tokenizer", "standard")
-                .put("index.analysis.analyzer.synonym.filter", "mySynonyms")
-                // Stop filter remains in server as it is part of lucene-core
                 .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard")
                 .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop")
                 .build();
         indexService = createIndex("test", settings);
         parser = indexService.mapperService().documentMapperParser();
     }
 
 
 
     @Override
     protected Collection<Class<? extends Plugin>> getPlugins() {
         List<Class<? extends Plugin>> classpathPlugins = new ArrayList<>();
@@ -107,16 +102,16 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
 
     protected String getFieldType() {
         return "annotated_text";
     }
 
     public void testAnnotationInjection() throws IOException {
 
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject()
             .endObject().endObject());
 
         DocumentMapper mapper = indexService.mapperService().merge("type",
             new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
 
         // Use example of typed and untyped annotations
         String annotatedText = "He paid [Stormy Daniels](Stephanie+Clifford&Payee) hush money";

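For readers unfamiliar with the markup exercised by testAnnotationInjection: the annotated_text field accepts Markdown-link-style spans whose target carries one or more annotation values separated by '&', with '+' standing in for a space. The reading below is inferred from the tests in this diff, not from official documentation:

// The visible text "Stormy Daniels" is tokenised as usual, and the values
// "Stephanie Clifford" and "Payee" are injected as extra tokens at the same
// position (the test later seeks the term "Stephanie Clifford").
String annotatedText = "He paid [Stormy Daniels](Stephanie+Clifford&Payee) hush money";

// Unterminated markup falls back to plain-text tokenisation, which is what
// testToleranceForBadAnnotationMarkup asserts further down.
String badMarkup = "foo [bar](MissingEndBracket baz";
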
@@ -140,12 +135,12 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
         try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
             LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
             TermsEnum terms = leaf.terms("field").iterator();
 
             assertTrue(terms.seekExact(new BytesRef("stormy")));
             PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
             assertEquals(0, postings.nextDoc());
             assertEquals(2, postings.nextPosition());
 
             assertTrue(terms.seekExact(new BytesRef("Stephanie Clifford")));
             postings = terms.postings(null, PostingsEnum.POSITIONS);
             assertEquals(0, postings.nextDoc());
@@ -156,23 +151,23 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
             assertEquals(0, postings.nextDoc());
             assertEquals(2, postings.nextPosition());
 
 
             assertTrue(terms.seekExact(new BytesRef("hush")));
             postings = terms.postings(null, PostingsEnum.POSITIONS);
             assertEquals(0, postings.nextDoc());
             assertEquals(4, postings.nextPosition());
 
         }
     }
 
     public void testToleranceForBadAnnotationMarkup() throws IOException {
 
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("field").field("type", getFieldType()).endObject().endObject()
             .endObject().endObject());
 
         DocumentMapper mapper = indexService.mapperService().merge("type",
             new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
 
         String annotatedText = "foo [bar](MissingEndBracket baz";
         SourceToParse sourceToParse = SourceToParse.source("test", "type", "1", BytesReference
@@ -195,12 +190,12 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
         try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
             LeafReader leaf = searcher.getDirectoryReader().leaves().get(0).reader();
             TermsEnum terms = leaf.terms("field").iterator();
 
             assertTrue(terms.seekExact(new BytesRef("foo")));
             PostingsEnum postings = terms.postings(null, PostingsEnum.POSITIONS);
             assertEquals(0, postings.nextDoc());
             assertEquals(0, postings.nextPosition());
 
             assertTrue(terms.seekExact(new BytesRef("bar")));
             postings = terms.postings(null, PostingsEnum.POSITIONS);
             assertEquals(0, postings.nextDoc());
@@ -209,18 +204,18 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
             assertFalse(terms.seekExact(new BytesRef("MissingEndBracket")));
             // Bad markup means value is treated as plain text and fed through tokenisation
             assertTrue(terms.seekExact(new BytesRef("missingendbracket")));
 
         }
     }
 
     public void testAgainstTermVectorsAPI() throws IOException {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
             .startObject("properties").startObject("tvfield").field("type", getFieldType())
             .field("term_vector", "with_positions_offsets_payloads")
             .endObject().endObject()
             .endObject().endObject());
         indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
 
 
         int max = between(3, 10);
         BulkRequestBuilder bulk = client().prepareBulk();
@@ -231,13 +226,13 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
         bulk.get();
 
         TermVectorsRequest request = new TermVectorsRequest("test", "type", "0").termStatistics(true);
 
         IndicesService indicesService = getInstanceFromNode(IndicesService.class);
         IndexService test = indicesService.indexService(resolveIndex("test"));
         IndexShard shard = test.getShardOrNull(0);
         assertThat(shard, notNullValue());
         TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request);
         assertEquals(1, response.getFields().size());
 
         Terms terms = response.getFields().terms("tvfield");
         TermsEnum iterator = terms.iterator();
@@ -245,14 +240,14 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
         Set<String> foundTerms = new HashSet<>();
         while ((term = iterator.next()) != null) {
             foundTerms.add(term.utf8ToString());
         }
         //Check we have both text and annotation tokens
         assertTrue(foundTerms.contains("brown"));
         assertTrue(foundTerms.contains("Color"));
         assertTrue(foundTerms.contains("fox"));
 
     }
 
     // ===== Code below copied from TextFieldMapperTests ========
 
     public void testDefaults() throws IOException {
@@ -616,7 +611,7 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
         assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPositions(), equalTo(true));
         assertThat(doc.rootDoc().getField("field6").fieldType().storeTermVectorPayloads(), equalTo(true));
     }
 
     public void testNullConfigValuesFail() throws MapperParsingException, IOException {
         String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
             .startObject("type")
@@ -677,5 +672,5 @@ public class AnnotatedTextFieldMapperTests extends ESSingleNodeTestCase {
     }
 
 
 
 }

@@ -19,9 +19,11 @@
 
 package org.elasticsearch.repositories.s3;
 
+import com.amazonaws.AmazonServiceException;
 import com.amazonaws.services.s3.model.CannedAccessControlList;
 import com.amazonaws.services.s3.model.DeleteObjectsRequest;
 import com.amazonaws.services.s3.model.DeleteObjectsRequest.KeyVersion;
+import com.amazonaws.services.s3.model.HeadBucketRequest;
 import com.amazonaws.services.s3.model.ObjectListing;
 import com.amazonaws.services.s3.model.S3ObjectSummary;
 import com.amazonaws.services.s3.model.StorageClass;
@@ -66,14 +68,23 @@ class S3BlobStore extends AbstractComponent implements BlobStore {
 
         // Note: the method client.doesBucketExist() may return 'true' if the bucket exists
         // but we don't have access to it (ie, 403 Forbidden response code)
-        // Also, if invalid security credentials are used to execute this method, the
-        // client is not able to distinguish between bucket permission errors and
-        // invalid credential errors, and this method could return an incorrect result.
         try (AmazonS3Reference clientReference = clientReference()) {
             SocketAccess.doPrivilegedVoid(() -> {
-                if (clientReference.client().doesBucketExist(bucket) == false) {
-                    throw new IllegalArgumentException("The bucket [" + bucket + "] does not exist. Please create it before "
-                        + " creating an s3 snapshot repository backed by it.");
+                try {
+                    clientReference.client().headBucket(new HeadBucketRequest(bucket));
+                } catch (final AmazonServiceException e) {
+                    if (e.getStatusCode() == 301) {
+                        throw new IllegalArgumentException("the bucket [" + bucket + "] is in a different region than you configured", e);
+                    } else if (e.getStatusCode() == 403) {
+                        throw new IllegalArgumentException("you do not have permissions to access the bucket [" + bucket + "]", e);
+                    } else if (e.getStatusCode() == 404) {
+                        throw new IllegalArgumentException(
+                            "the bucket [" + bucket + "] does not exist;"
+                                + " please create it before creating an S3 snapshot repository backed by it",
+                            e);
+                    } else {
+                        throw new IllegalArgumentException("error checking the existence of bucket [" + bucket + "]", e);
+                    }
                 }
             });
         }

@ -158,7 +169,9 @@ class S3BlobStore extends AbstractComponent implements BlobStore {
|
|||||||
return cannedACL;
|
return cannedACL;
|
||||||
}
|
}
|
||||||
|
|
||||||
public StorageClass getStorageClass() { return storageClass; }
|
public StorageClass getStorageClass() {
|
||||||
|
return storageClass;
|
||||||
|
}
|
||||||
|
|
||||||
public static StorageClass initStorageClass(String storageClass) {
|
public static StorageClass initStorageClass(String storageClass) {
|
||||||
if ((storageClass == null) || storageClass.equals("")) {
|
if ((storageClass == null) || storageClass.equals("")) {
|
||||||
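The S3BlobStore hunk above replaces doesBucketExist() with headBucket() because the HEAD response carries an HTTP status code that separates a relocated bucket (301), a permissions problem (403), and a missing bucket (404). A self-contained sketch of the same pattern against the AWS SDK v1; the client setup and bucket name are hypothetical:

import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.HeadBucketRequest;

public class HeadBucketDemo {
    public static void main(String[] args) {
        AmazonS3 s3 = AmazonS3ClientBuilder.defaultClient(); // hypothetical client setup
        try {
            s3.headBucket(new HeadBucketRequest("my-snapshot-bucket")); // hypothetical name
            System.out.println("bucket exists and is accessible");
        } catch (AmazonServiceException e) {
            // The status code, not the exception type, tells the cases apart.
            switch (e.getStatusCode()) {
                case 301: System.out.println("bucket is in a different region"); break;
                case 403: System.out.println("no permission to access the bucket"); break;
                case 404: System.out.println("bucket does not exist"); break;
                default:  System.out.println("unexpected error: " + e.getStatusCode());
            }
        }
    }
}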
@@ -20,6 +20,7 @@
 package org.elasticsearch.repositories.s3;
 
 import com.amazonaws.AmazonClientException;
+import com.amazonaws.AmazonServiceException;
 import com.amazonaws.SdkClientException;
 import com.amazonaws.services.s3.AbstractAmazonS3;
 import com.amazonaws.services.s3.model.AmazonS3Exception;
@@ -27,6 +28,8 @@ import com.amazonaws.services.s3.model.DeleteObjectRequest;
 import com.amazonaws.services.s3.model.DeleteObjectsRequest;
 import com.amazonaws.services.s3.model.DeleteObjectsResult;
 import com.amazonaws.services.s3.model.GetObjectRequest;
+import com.amazonaws.services.s3.model.HeadBucketRequest;
+import com.amazonaws.services.s3.model.HeadBucketResult;
 import com.amazonaws.services.s3.model.ListObjectsRequest;
 import com.amazonaws.services.s3.model.ObjectListing;
 import com.amazonaws.services.s3.model.ObjectMetadata;
@@ -73,8 +76,15 @@ class MockAmazonS3 extends AbstractAmazonS3 {
     }
 
     @Override
-    public boolean doesBucketExist(final String bucket) {
-        return this.bucket.equalsIgnoreCase(bucket);
+    public HeadBucketResult headBucket(final HeadBucketRequest headBucketRequest) throws SdkClientException, AmazonServiceException {
+        if (this.bucket.equalsIgnoreCase(headBucketRequest.getBucketName())) {
+            return new HeadBucketResult();
+        } else {
+            final AmazonServiceException e =
+                    new AmazonServiceException("bucket [" + headBucketRequest.getBucketName() + "] does not exist");
+            e.setStatusCode(404);
+            throw e;
+        }
     }
 
     @Override
@@ -19,9 +19,13 @@
 
 package org.elasticsearch.repositories.s3;
 
+import com.amazonaws.AmazonClientException;
+import com.amazonaws.AmazonServiceException;
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.services.s3.AmazonS3;
+import com.amazonaws.services.s3.model.HeadBucketRequest;
+import com.amazonaws.services.s3.model.HeadBucketResult;
 import org.elasticsearch.cluster.metadata.RepositoryMetaData;
 import org.elasticsearch.common.SuppressForbidden;
 import org.elasticsearch.common.settings.MockSecureSettings;
@@ -57,9 +61,10 @@ public class RepositoryCredentialsTests extends ESTestCase {
         }
 
         @Override
-        public boolean doesBucketExist(String bucketName) {
-            return true;
+        public HeadBucketResult headBucket(HeadBucketRequest headBucketRequest) throws AmazonClientException, AmazonServiceException {
+            return new HeadBucketResult();
         }
 
     }
 
     static final class ProxyS3Service extends S3Service {
@@ -19,7 +19,11 @@
 
 package org.elasticsearch.repositories.s3;
 
+import com.amazonaws.AmazonServiceException;
+import com.amazonaws.SdkClientException;
 import com.amazonaws.services.s3.AbstractAmazonS3;
+import com.amazonaws.services.s3.model.HeadBucketRequest;
+import com.amazonaws.services.s3.model.HeadBucketResult;
 import org.elasticsearch.cluster.metadata.RepositoryMetaData;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
@@ -42,8 +46,8 @@ public class S3RepositoryTests extends ESTestCase {
     private static class DummyS3Client extends AbstractAmazonS3 {
 
         @Override
-        public boolean doesBucketExist(String bucketName) {
-            return true;
+        public HeadBucketResult headBucket(final HeadBucketRequest request) throws SdkClientException, AmazonServiceException {
+            return new HeadBucketResult();
         }
 
         @Override
@@ -23,7 +23,7 @@
       },
       "filter_path": {
         "type": "list",
-        "description": "A comma-separated list of filters used to reduce the respone."
+        "description": "A comma-separated list of filters used to reduce the response."
       }
     }
   }
@@ -890,17 +890,6 @@ public class DateFormatters {
     private static final DateFormatter YEAR = new JavaDateFormatter("year",
         new DateTimeFormatterBuilder().appendValue(ChronoField.YEAR).toFormatter(Locale.ROOT));
 
-    /*
-     * Returns a formatter for parsing the seconds since the epoch
-     */
-    private static final DateFormatter EPOCH_SECOND = new JavaDateFormatter("epoch_second",
-        new DateTimeFormatterBuilder().appendValue(ChronoField.INSTANT_SECONDS).toFormatter(Locale.ROOT));
-
-    /*
-     * Parses the milliseconds since/before the epoch
-     */
-    private static final DateFormatter EPOCH_MILLIS = EpochMillisDateFormatter.INSTANCE;
-
     /*
      * Returns a formatter that combines a full date and two digit hour of
      * day. (yyyy-MM-dd'T'HH)
@@ -1375,9 +1364,9 @@ public class DateFormatters {
         } else if ("yearMonthDay".equals(input) || "year_month_day".equals(input)) {
             return YEAR_MONTH_DAY;
         } else if ("epoch_second".equals(input)) {
-            return EPOCH_SECOND;
+            return EpochSecondsDateFormatter.INSTANCE;
         } else if ("epoch_millis".equals(input)) {
-            return EPOCH_MILLIS;
+            return EpochMillisDateFormatter.INSTANCE;
         // strict date formats here, must be at least 4 digits for year and two for months and two for day
         } else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) {
             return STRICT_BASIC_WEEK_DATE;
@@ -0,0 +1,85 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common.time;
+
+import java.math.BigDecimal;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.time.format.DateTimeParseException;
+import java.time.temporal.TemporalAccessor;
+import java.time.temporal.TemporalField;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+public class EpochSecondsDateFormatter implements DateFormatter {
+
+    public static DateFormatter INSTANCE = new EpochSecondsDateFormatter();
+    private static final Pattern SPLIT_BY_DOT_PATTERN = Pattern.compile("\\.");
+
+    private EpochSecondsDateFormatter() {}
+
+    @Override
+    public TemporalAccessor parse(String input) {
+        try {
+            if (input.contains(".")) {
+                String[] inputs = SPLIT_BY_DOT_PATTERN.split(input, 2);
+                Long seconds = Long.valueOf(inputs[0]);
+                if (inputs[1].length() == 0) {
+                    // this is BWC compatible with joda time, nothing after the dot is allowed
+                    return Instant.ofEpochSecond(seconds, 0).atZone(ZoneOffset.UTC);
+                }
+                if (inputs[1].length() > 9) {
+                    throw new DateTimeParseException("too much granularity after dot [" + input + "]", input, 0);
+                }
+                Long nanos = new BigDecimal(inputs[1]).movePointRight(9 - inputs[1].length()).longValueExact();
+                return Instant.ofEpochSecond(seconds, nanos).atZone(ZoneOffset.UTC);
+            } else {
+                return Instant.ofEpochSecond(Long.valueOf(input)).atZone(ZoneOffset.UTC);
+            }
+        } catch (NumberFormatException e) {
+            throw new DateTimeParseException("invalid number [" + input + "]", input, 0, e);
+        }
+    }
+
+    @Override
+    public DateFormatter withZone(ZoneId zoneId) {
+        return this;
+    }
+
+    @Override
+    public String format(TemporalAccessor accessor) {
+        Instant instant = Instant.from(accessor);
+        if (instant.getNano() != 0) {
+            return String.valueOf(instant.getEpochSecond()) + "." + String.valueOf(instant.getNano()).replaceAll("0*$", "");
+        }
+        return String.valueOf(instant.getEpochSecond());
+    }
+
+    @Override
+    public String pattern() {
+        return "epoch_seconds";
+    }
+
+    @Override
+    public DateFormatter parseDefaulting(Map<TemporalField, Long> fields) {
+        return this;
+    }
+}
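The parse() method above scales whatever follows the dot to nanoseconds with BigDecimal.movePointRight, allowing at most nine fractional digits. A standalone sketch of that arithmetic; the class name and sample inputs are illustrative:

import java.math.BigDecimal;
import java.time.Instant;
import java.time.ZoneOffset;

class EpochSecondsDemo {
    // Mirrors the arithmetic above: "<seconds>.<fraction>" with up to nine
    // fractional digits, scaled to nanoseconds via movePointRight.
    static Instant parseEpochSeconds(String input) {
        String[] parts = input.split("\\.", 2);
        long seconds = Long.parseLong(parts[0]);
        long nanos = 0;
        if (parts.length == 2 && parts[1].length() > 0) {
            if (parts[1].length() > 9) {
                throw new IllegalArgumentException("too much granularity after dot [" + input + "]");
            }
            nanos = new BigDecimal(parts[1]).movePointRight(9 - parts[1].length()).longValueExact();
        }
        return Instant.ofEpochSecond(seconds, nanos);
    }

    public static void main(String[] args) {
        // ".123" has three digits, so it scales by 10^6: 123 -> 123_000_000 ns
        System.out.println(parseEpochSeconds("1234567890.123").atZone(ZoneOffset.UTC));
        // Bare seconds still parse
        System.out.println(parseEpochSeconds("1234567890").atZone(ZoneOffset.UTC));
    }
}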
@@ -19,6 +19,10 @@
 
 package org.elasticsearch.env;
 
+import java.io.UncheckedIOException;
+import java.util.Iterator;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.message.ParameterizedMessage;
@@ -486,12 +490,27 @@ public final class NodeEnvironment implements Closeable {
     }
 
     private static boolean assertPathsDoNotExist(final Path[] paths) {
-        Set<Path> existingPaths = new HashSet<>();
-        for (Path path : paths) {
-            if (FileSystemUtils.exists(path)) {
-                existingPaths.add(path);
-            }
-        }
+        Set<Path> existingPaths = Stream.of(paths)
+            .filter(FileSystemUtils::exists)
+            .filter(leftOver -> {
+                // Relaxed assertion for the special case where only the empty state directory exists after deleting
+                // the shard directory because it was created again as a result of a metadata read action concurrently.
+                try (DirectoryStream<Path> children = Files.newDirectoryStream(leftOver)) {
+                    Iterator<Path> iter = children.iterator();
+                    if (iter.hasNext() == false) {
+                        return true;
+                    }
+                    Path maybeState = iter.next();
+                    if (iter.hasNext() || maybeState.equals(leftOver.resolve(MetaDataStateFormat.STATE_DIR_NAME)) == false) {
+                        return true;
+                    }
+                    try (DirectoryStream<Path> stateChildren = Files.newDirectoryStream(maybeState)) {
+                        return stateChildren.iterator().hasNext();
+                    }
+                } catch (IOException e) {
+                    throw new UncheckedIOException(e);
+                }
+            }).collect(Collectors.toSet());
         assert existingPaths.size() == 0 : "Paths exist that should have been deleted: " + existingPaths;
         return existingPaths.size() == 0;
     }
@@ -551,10 +570,8 @@ public final class NodeEnvironment implements Closeable {
      * @param index the index to lock shards for
      * @param lockTimeoutMS how long to wait for acquiring the indices shard locks
      * @return the {@link ShardLock} instances for this index.
-     * @throws IOException if an IOException occurs.
      */
-    public List<ShardLock> lockAllForIndex(Index index, IndexSettings settings, long lockTimeoutMS)
-        throws IOException, ShardLockObtainFailedException {
+    public List<ShardLock> lockAllForIndex(Index index, IndexSettings settings, long lockTimeoutMS) throws ShardLockObtainFailedException {
         final int numShards = settings.getNumberOfShards();
         if (numShards <= 0) {
             throw new IllegalArgumentException("settings must contain a non-null > 0 number of shards");
@@ -842,7 +859,7 @@ public final class NodeEnvironment implements Closeable {
     /**
     * Resolves all existing paths to <code>indexFolderName</code> in ${data.paths}/nodes/{node.id}/indices
     */
-    public Path[] resolveIndexFolder(String indexFolderName) throws IOException {
+    public Path[] resolveIndexFolder(String indexFolderName) {
         if (nodePaths == null || locks == null) {
             throw new IllegalStateException("node is not configured to store local location");
         }
@@ -987,17 +1004,6 @@ public final class NodeEnvironment implements Closeable {
         }
     }
 
-    /**
-     * Resolve the custom path for an index's shard.
-     * Uses the {@code IndexMetaData.SETTING_DATA_PATH} setting to determine
-     * the root path for the index.
-     *
-     * @param indexSettings settings for the index
-     */
-    public Path resolveBaseCustomLocation(IndexSettings indexSettings) {
-        return resolveBaseCustomLocation(indexSettings, sharedDataPath, nodeLockId);
-    }
-
     /**
      * Resolve the custom path for an index's shard.
      * Uses the {@code IndexMetaData.SETTING_DATA_PATH} setting to determine
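In the assertPathsDoNotExist() rewrite above, a leftover shard directory is excused only when it holds exactly one entry, the state directory, and that directory is itself empty; an entirely empty leftover directory still trips the assertion. The same predicate in isolation, as a sketch; the class and method names are illustrative, and it assumes MetaDataStateFormat.STATE_DIR_NAME resolves to "_state":

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Iterator;

class LeftoverCheck {
    // True only for the tolerated case: one child named "_state" that is empty.
    static boolean onlyEmptyStateDir(Path dir) throws IOException {
        try (DirectoryStream<Path> children = Files.newDirectoryStream(dir)) {
            Iterator<Path> it = children.iterator();
            if (it.hasNext() == false) {
                return false; // an empty leftover directory still fails the assertion
            }
            Path child = it.next();
            if (it.hasNext() || child.equals(dir.resolve("_state")) == false) {
                return false; // more than one entry, or something other than the state dir
            }
            try (DirectoryStream<Path> state = Files.newDirectoryStream(child)) {
                return state.iterator().hasNext() == false; // the state dir must be empty
            }
        }
    }
}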
@@ -29,7 +29,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
  * directory. Internal processes should acquire a lock on a shard
  * before executing any write operations on the shards data directory.
  *
- * @see org.elasticsearch.env.NodeEnvironment
+ * @see NodeEnvironment
  */
 public abstract class ShardLock implements Closeable {
 
@@ -56,13 +56,6 @@ public abstract class ShardLock implements Closeable {
 
     protected abstract void closeInternal();
 
-    /**
-     * Returns true if this lock is still open ie. has not been closed yet.
-     */
-    public final boolean isOpen() {
-        return closed.get() == false;
-    }
-
     @Override
     public String toString() {
         return "ShardLock{" +
@@ -158,16 +158,8 @@ public final class AnalysisRegistry implements Closeable {
 
     public Map<String, TokenFilterFactory> buildTokenFilterFactories(IndexSettings indexSettings) throws IOException {
         final Map<String, Settings> tokenFiltersSettings = indexSettings.getSettings().getGroups(INDEX_ANALYSIS_FILTER);
-        Map<String, AnalysisModule.AnalysisProvider<TokenFilterFactory>> tokenFilters = new HashMap<>(this.tokenFilters);
-        /*
-         * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
-         * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
-         * hide internal data-structures as much as possible.
-         */
-        tokenFilters.put("synonym", requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings)));
-        tokenFilters.put("synonym_graph", requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings)));
-
-        return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings, Collections.unmodifiableMap(tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters);
+        return buildMapping(Component.FILTER, indexSettings, tokenFiltersSettings,
+            Collections.unmodifiableMap(this.tokenFilters), prebuiltAnalysis.preConfiguredTokenFilters);
     }
 
     public Map<String, TokenizerFactory> buildTokenizerFactories(IndexSettings indexSettings) throws IOException {
@@ -222,18 +214,7 @@ public final class AnalysisRegistry implements Closeable {
         if (tokenFilterSettings.containsKey(tokenFilter)) {
             Settings currentSettings = tokenFilterSettings.get(tokenFilter);
             String typeName = currentSettings.get("type");
-            /*
-             * synonym and synonym_graph are different than everything else since they need access to the tokenizer factories for the index.
-             * instead of building the infrastructure for plugins we rather make it a real exception to not pollute the general interface and
-             * hide internal data-structures as much as possible.
-             */
-            if ("synonym".equals(typeName)) {
-                return requiresAnalysisSettings((is, env, name, settings) -> new SynonymTokenFilterFactory(is, env, this, name, settings));
-            } else if ("synonym_graph".equals(typeName)) {
-                return requiresAnalysisSettings((is, env, name, settings) -> new SynonymGraphTokenFilterFactory(is, env, this, name, settings));
-            } else {
-                return getAnalysisProvider(Component.FILTER, tokenFilters, tokenFilter, typeName);
-            }
+            return getAnalysisProvider(Component.FILTER, tokenFilters, tokenFilter, typeName);
         } else {
             return getTokenFilterProvider(tokenFilter);
         }
@@ -257,19 +238,6 @@ public final class AnalysisRegistry implements Closeable {
         }
     }
 
-    private static <T> AnalysisModule.AnalysisProvider<T> requiresAnalysisSettings(AnalysisModule.AnalysisProvider<T> provider) {
-        return new AnalysisModule.AnalysisProvider<T>() {
-            @Override
-            public T get(IndexSettings indexSettings, Environment environment, String name, Settings settings) throws IOException {
-                return provider.get(indexSettings, environment, name, settings);
-            }
-            @Override
-            public boolean requiresAnalysisSettings() {
-                return true;
-            }
-        };
-    }
-
     enum Component {
         ANALYZER {
             @Override
@@ -85,7 +85,7 @@ public final class IgnoredFieldMapper extends MetadataFieldMapper {
         }
     }
 
-    public static final class IgnoredFieldType extends TermBasedFieldType {
+    public static final class IgnoredFieldType extends StringFieldType {
 
         public IgnoredFieldType() {
         }
@@ -38,6 +38,7 @@ import org.elasticsearch.index.query.QueryShardContext;
 import java.io.IOException;
 import java.util.List;
 import java.util.Map;
+import java.util.regex.Pattern;
 
 
 public class IndexFieldMapper extends MetadataFieldMapper {
@@ -151,14 +152,43 @@ public class IndexFieldMapper extends MetadataFieldMapper {
                     + " vs. " + values);
         }
 
+        @Override
+        public Query prefixQuery(String value,
+                                 @Nullable MultiTermQuery.RewriteMethod method,
+                                 QueryShardContext context) {
+            String indexName = context.getFullyQualifiedIndex().getName();
+            if (indexName.startsWith(value)) {
+                return Queries.newMatchAllQuery();
+            } else {
+                return Queries.newMatchNoDocsQuery("The index [" + indexName +
+                        "] doesn't match the provided prefix [" + value + "].");
+            }
+        }
+
+        @Override
+        public Query regexpQuery(String value, int flags, int maxDeterminizedStates,
+                                 MultiTermQuery.RewriteMethod method, QueryShardContext context) {
+            String indexName = context.getFullyQualifiedIndex().getName();
+            Pattern pattern = Regex.compile(value, Regex.flagsToString(flags));
+
+            if (pattern.matcher(indexName).matches()) {
+                return Queries.newMatchAllQuery();
+            } else {
+                return Queries.newMatchNoDocsQuery("The index [" + indexName +
+                        "] doesn't match the provided pattern [" + value + "].");
+            }
+        }
+
         @Override
         public Query wildcardQuery(String value,
                                    @Nullable MultiTermQuery.RewriteMethod method,
                                    QueryShardContext context) {
-            if (isSameIndex(value, context.getFullyQualifiedIndex().getName())) {
+            String indexName = context.getFullyQualifiedIndex().getName();
+            if (isSameIndex(value, indexName)) {
                 return Queries.newMatchAllQuery();
             } else {
-                return Queries.newMatchNoDocsQuery("Index didn't match. Index queried: " + context.index().getName() + " vs. " + value);
+                return Queries.newMatchNoDocsQuery("The index [" + indexName +
+                        "] doesn't match the provided pattern [" + value + "].");
             }
         }
 
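The prefixQuery and regexpQuery overrides added above let term-level queries on the _index meta field be answered per shard from the index name alone, as match-all or match-none, without touching documents. A hedged usage sketch with the query builders; the index pattern and class name are illustrative:

import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

class IndexFieldQueryDemo {
    // A prefix query on _index is rewritten per shard: shards of indices
    // starting with "logs-" get match-all, all other shards get match-none.
    static SearchSourceBuilder searchOnlyLogsIndices() {
        return new SearchSourceBuilder()
            .query(QueryBuilders.prefixQuery("_index", "logs-"));
    }
}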
@@ -108,7 +108,7 @@ public class RoutingFieldMapper extends MetadataFieldMapper {
         }
     }
 
-    static final class RoutingFieldType extends TermBasedFieldType {
+    static final class RoutingFieldType extends StringFieldType {
 
         RoutingFieldType() {
         }
@@ -254,9 +254,6 @@ public class TransportNodesListShardStoreMetaData extends TransportNodesAction<T
 
     public static class NodesStoreFilesMetaData extends BaseNodesResponse<NodeStoreFilesMetaData> {
 
-        NodesStoreFilesMetaData() {
-        }
-
         public NodesStoreFilesMetaData(ClusterName clusterName, List<NodeStoreFilesMetaData> nodes, List<FailedNodeException> failures) {
             super(clusterName, nodes, failures);
         }
@@ -0,0 +1,95 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.ingest;
+
+import org.elasticsearch.common.metrics.CounterMetric;
+import org.elasticsearch.common.metrics.MeanMetric;
+
+/**
+ * <p>Metrics to measure ingest actions.
+ * <p>These counts measure documents and timings for a given scope.
+ * The scope is determined by the calling code. For example you can use this class to count all documents across all pipelines,
+ * or you can use this class to count documents for a given pipeline or a specific processor.
+ * This class does not make assumptions about its given scope.
+ */
+class IngestMetric {
+
+    /**
+     * The time it takes to complete the measured item.
+     */
+    private final MeanMetric ingestTime = new MeanMetric();
+    /**
+     * The current count of things being measured. Should most likely only ever be 0 or 1.
+     * Useful when aggregating multiple metrics to see how many things are in flight.
+     */
+    private final CounterMetric ingestCurrent = new CounterMetric();
+    /**
+     * The ever increasing count of things being measured
+     */
+    private final CounterMetric ingestCount = new CounterMetric();
+    /**
+     * The ever increasing count of failures
+     */
+    private final CounterMetric ingestFailed = new CounterMetric();
+
+    /**
+     * Call this prior to the ingest action.
+     */
+    void preIngest() {
+        ingestCurrent.inc();
+    }
+
+    /**
+     * Call this after performing the ingest action, even if the action failed.
+     * @param ingestTimeInMillis The time it took to perform the action.
+     */
+    void postIngest(long ingestTimeInMillis) {
+        ingestCurrent.dec();
+        ingestTime.inc(ingestTimeInMillis);
+        ingestCount.inc();
+    }
+
+    /**
+     * Call this if the ingest action failed.
+     */
+    void ingestFailed() {
+        ingestFailed.inc();
+    }
+
+    /**
+     * <p>Add two sets of metrics together.
+     * <p><strong>Note -</strong> this method does <strong>not</strong> add the current count values.
+     * The current count value is ephemeral and requires increase/decrease operation pairs to keep the value correct.
+     *
+     * @param metrics The metric to add.
+     */
+    void add(IngestMetric metrics) {
+        ingestCount.inc(metrics.ingestCount.count());
+        ingestTime.inc(metrics.ingestTime.sum());
+        ingestFailed.inc(metrics.ingestFailed.count());
+    }
+
+    /**
+     * Creates a serializable representation for these metrics.
+     */
+    IngestStats.Stats createStats() {
+        return new IngestStats.Stats(ingestCount.count(), ingestTime.sum(), ingestCurrent.count(), ingestFailed.count());
+    }
+}
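The intended call pattern for these metrics, visible again in Pipeline.execute() further down this diff, brackets the measured work with preIngest()/postIngest() and counts failures separately. A minimal sketch; doWork stands in for the hypothetical ingest action:

import java.util.concurrent.TimeUnit;

class IngestMetricUsage {
    static void measured(IngestMetric metric, Runnable doWork) {
        long start = System.nanoTime();
        metric.preIngest();                 // one increment per in-flight action
        try {
            doWork.run();                   // the hypothetical ingest action
        } catch (RuntimeException e) {
            metric.ingestFailed();          // failures are counted separately
            throw e;
        } finally {
            // postIngest runs on success and failure alike
            metric.postIngest(TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - start));
        }
    }
}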
@@ -23,15 +23,15 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
-import java.util.Optional;
 import java.util.Set;
 import java.util.concurrent.TimeUnit;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
+import java.util.stream.Collectors;
 
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.ResourceNotFoundException;
@@ -49,8 +49,6 @@ import org.elasticsearch.cluster.ClusterStateApplier;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.service.ClusterService;
-import org.elasticsearch.common.metrics.CounterMetric;
-import org.elasticsearch.common.metrics.MeanMetric;
 import org.elasticsearch.common.regex.Regex;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.AbstractRunnable;
@@ -79,8 +77,7 @@ public class IngestService implements ClusterStateApplier {
     // are loaded, so in the cluster state we just save the pipeline config and here we keep the actual pipelines around.
     private volatile Map<String, Pipeline> pipelines = new HashMap<>();
     private final ThreadPool threadPool;
-    private final StatsHolder totalStats = new StatsHolder();
-    private volatile Map<String, StatsHolder> statsHolderPerPipeline = Collections.emptyMap();
+    private final IngestMetric totalMetrics = new IngestMetric();
 
     public IngestService(ClusterService clusterService, ThreadPool threadPool,
                          Environment env, ScriptService scriptService, AnalysisRegistry analysisRegistry,
@@ -257,10 +254,16 @@ public class IngestService implements ClusterStateApplier {
     @Override
     public void applyClusterState(final ClusterChangedEvent event) {
         ClusterState state = event.state();
+        Map<String, Pipeline> originalPipelines = pipelines;
         innerUpdatePipelines(event.previousState(), state);
-        IngestMetadata ingestMetadata = state.getMetaData().custom(IngestMetadata.TYPE);
-        if (ingestMetadata != null) {
-            updatePipelineStats(ingestMetadata);
+        //pipelines changed, so add the old metrics to the new metrics
+        if (originalPipelines != pipelines) {
+            pipelines.forEach((id, pipeline) -> {
+                Pipeline originalPipeline = originalPipelines.get(id);
+                if (originalPipeline != null) {
+                    pipeline.getMetrics().add(originalPipeline.getMetrics());
+                }
+            });
         }
     }
 
@@ -325,6 +328,7 @@ public class IngestService implements ClusterStateApplier {
     public void executeBulkRequest(Iterable<DocWriteRequest<?>> actionRequests,
                                    BiConsumer<IndexRequest, Exception> itemFailureHandler, Consumer<Exception> completionHandler,
                                    Consumer<IndexRequest> itemDroppedHandler) {
+
         threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() {
 
             @Override
@@ -367,37 +371,11 @@ public class IngestService implements ClusterStateApplier {
     }
 
     public IngestStats stats() {
-        Map<String, StatsHolder> statsHolderPerPipeline = this.statsHolderPerPipeline;
-
-        Map<String, IngestStats.Stats> statsPerPipeline = new HashMap<>(statsHolderPerPipeline.size());
-        for (Map.Entry<String, StatsHolder> entry : statsHolderPerPipeline.entrySet()) {
-            statsPerPipeline.put(entry.getKey(), entry.getValue().createStats());
-        }
-
-        return new IngestStats(totalStats.createStats(), statsPerPipeline);
-    }
-
-    void updatePipelineStats(IngestMetadata ingestMetadata) {
-        boolean changed = false;
-        Map<String, StatsHolder> newStatsPerPipeline = new HashMap<>(statsHolderPerPipeline);
-        Iterator<String> iterator = newStatsPerPipeline.keySet().iterator();
-        while (iterator.hasNext()) {
-            String pipeline = iterator.next();
-            if (ingestMetadata.getPipelines().containsKey(pipeline) == false) {
-                iterator.remove();
-                changed = true;
-            }
-        }
-        for (String pipeline : ingestMetadata.getPipelines().keySet()) {
-            if (newStatsPerPipeline.containsKey(pipeline) == false) {
-                newStatsPerPipeline.put(pipeline, new StatsHolder());
-                changed = true;
-            }
-        }
-
-        if (changed) {
-            statsHolderPerPipeline = Collections.unmodifiableMap(newStatsPerPipeline);
-        }
+        Map<String, IngestStats.Stats> statsPerPipeline =
+            pipelines.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, v -> v.getValue().getMetrics().createStats()));
+
+        return new IngestStats(totalMetrics.createStats(), statsPerPipeline);
     }
 
     private void innerExecute(IndexRequest indexRequest, Pipeline pipeline, Consumer<IndexRequest> itemDroppedHandler) throws Exception {
@@ -408,10 +386,8 @@ public class IngestService implements ClusterStateApplier {
         long startTimeInNanos = System.nanoTime();
         // the pipeline specific stat holder may not exist and that is fine:
         // (e.g. the pipeline may have been removed while we're ingesting a document
-        Optional<StatsHolder> pipelineStats = Optional.ofNullable(statsHolderPerPipeline.get(pipeline.getId()));
         try {
-            totalStats.preIngest();
-            pipelineStats.ifPresent(StatsHolder::preIngest);
+            totalMetrics.preIngest();
             String index = indexRequest.index();
             String type = indexRequest.type();
             String id = indexRequest.id();
@@ -437,13 +413,11 @@ public class IngestService implements ClusterStateApplier {
                 indexRequest.source(ingestDocument.getSourceAndMetadata());
             }
         } catch (Exception e) {
-            totalStats.ingestFailed();
-            pipelineStats.ifPresent(StatsHolder::ingestFailed);
+            totalMetrics.ingestFailed();
             throw e;
         } finally {
             long ingestTimeInMillis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startTimeInNanos);
-            totalStats.postIngest(ingestTimeInMillis);
-            pipelineStats.ifPresent(statsHolder -> statsHolder.postIngest(ingestTimeInMillis));
+            totalMetrics.postIngest(ingestTimeInMillis);
         }
     }
 
@@ -480,27 +454,4 @@ public class IngestService implements ClusterStateApplier {
         ExceptionsHelper.rethrowAndSuppress(exceptions);
     }
 
-    private static class StatsHolder {
-
-        private final MeanMetric ingestMetric = new MeanMetric();
-        private final CounterMetric ingestCurrent = new CounterMetric();
-        private final CounterMetric ingestFailed = new CounterMetric();
-
-        void preIngest() {
-            ingestCurrent.inc();
-        }
-
-        void postIngest(long ingestTimeInMillis) {
-            ingestCurrent.dec();
-            ingestMetric.inc(ingestTimeInMillis);
-        }
-
-        void ingestFailed() {
-            ingestFailed.inc();
-        }
-
-        IngestStats.Stats createStats() {
-            return new IngestStats.Stats(ingestMetric.count(), ingestMetric.sum(), ingestCurrent.count(), ingestFailed.count());
-        }
-    }
 }
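stats() now projects the live pipelines map straight into a snapshot map with Collectors.toMap instead of maintaining a parallel StatsHolder map. The same pattern in miniature; maps and values here are illustrative:

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

class SnapshotDemo {
    public static void main(String[] args) {
        Map<String, List<String>> pipelines = new HashMap<>();
        pipelines.put("p1", Arrays.asList("set", "rename"));
        pipelines.put("p2", Arrays.asList("grok"));

        // One pass: same keys, each value replaced by a derived snapshot.
        Map<String, Integer> processorCounts = pipelines.entrySet().stream()
            .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().size()));

        System.out.println(processorCounts); // e.g. {p1=2, p2=1}
    }
}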
@@ -22,10 +22,12 @@ package org.elasticsearch.ingest;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.Nullable;
 
+import java.time.Clock;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
 import org.elasticsearch.script.ScriptService;
 
 /**
@@ -44,12 +46,21 @@ public final class Pipeline {
     @Nullable
     private final Integer version;
     private final CompoundProcessor compoundProcessor;
+    private final IngestMetric metrics;
+    private final Clock clock;
 
     public Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor) {
+        this(id, description, version, compoundProcessor, Clock.systemUTC());
+    }
+
+    //package private for testing
+    Pipeline(String id, @Nullable String description, @Nullable Integer version, CompoundProcessor compoundProcessor, Clock clock) {
         this.id = id;
         this.description = description;
         this.compoundProcessor = compoundProcessor;
         this.version = version;
+        this.metrics = new IngestMetric();
+        this.clock = clock;
     }
 
     public static Pipeline create(String id, Map<String, Object> config,
@@ -78,7 +89,17 @@ public final class Pipeline {
      * Modifies the data of a document to be indexed based on the processor this pipeline holds
      */
     public IngestDocument execute(IngestDocument ingestDocument) throws Exception {
-        return compoundProcessor.execute(ingestDocument);
+        long startTimeInMillis = clock.millis();
+        try {
+            metrics.preIngest();
+            return compoundProcessor.execute(ingestDocument);
+        } catch (Exception e) {
+            metrics.ingestFailed();
+            throw e;
+        } finally {
+            long ingestTimeInMillis = clock.millis() - startTimeInMillis;
+            metrics.postIngest(ingestTimeInMillis);
+        }
     }
 
     /**
@@ -136,4 +157,10 @@ public final class Pipeline {
         return compoundProcessor.flattenProcessors();
     }
 
+    /**
+     * The metrics associated with this pipeline.
+     */
+    public IngestMetric getMetrics() {
+        return metrics;
+    }
 }
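The package-private constructor exists so tests can inject a deterministic Clock rather than wall time. A minimal sketch of why that helps; the values are illustrative:

import java.time.Clock;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneOffset;

class FixedClockDemo {
    public static void main(String[] args) {
        Clock start = Clock.fixed(Instant.EPOCH, ZoneOffset.UTC);
        Clock later = Clock.offset(start, Duration.ofMillis(42));

        // With injected clocks, the "measured" ingest time is exact and repeatable.
        long ingestTimeInMillis = later.millis() - start.millis();
        System.out.println(ingestTimeInMillis); // always 42
    }
}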
@@ -19,11 +19,12 @@
 
 package org.elasticsearch.script;
 
+import org.apache.logging.log4j.LogManager;
+import org.elasticsearch.common.logging.DeprecationLogger;
+
 import java.util.Collection;
 import java.util.Map;
 import java.util.Set;
-import org.apache.logging.log4j.LogManager;
-import org.elasticsearch.common.logging.DeprecationLogger;
 
 public final class ParameterMap implements Map<String, Object> {
 
@@ -34,7 +35,7 @@ public final class ParameterMap implements Map<String, Object> {
 
     private final Map<String, String> deprecations;
 
-    ParameterMap(Map<String, Object> params, Map<String, String> deprecations) {
+    public ParameterMap(Map<String, Object> params, Map<String, String> deprecations) {
         this.params = params;
         this.deprecations = deprecations;
     }
@@ -28,7 +28,7 @@ import org.elasticsearch.index.query.QueryRewriteContext;
 import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
-import java.util.List;
+import java.util.Collection;
 import java.util.Map;
 
 /**
@@ -79,12 +79,12 @@ public abstract class AggregationBuilder
     public abstract AggregationBuilder subAggregation(PipelineAggregationBuilder aggregation);
 
     /** Return the configured set of subaggregations **/
-    public List<AggregationBuilder> getSubAggregations() {
+    public Collection<AggregationBuilder> getSubAggregations() {
         return factoriesBuilder.getAggregatorFactories();
     }
 
     /** Return the configured set of pipeline aggregations **/
-    public List<PipelineAggregationBuilder> getPipelineAggregations() {
+    public Collection<PipelineAggregationBuilder> getPipelineAggregations() {
         return factoriesBuilder.getPipelineAggregatorFactories();
     }
 
|
@ -38,9 +38,11 @@ import org.elasticsearch.search.profile.aggregation.ProfilingAggregator;
|
|||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
import java.util.Collection;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
import java.util.HashMap;
|
import java.util.HashMap;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
|
import java.util.LinkedHashSet;
|
||||||
import java.util.LinkedList;
|
import java.util.LinkedList;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
@ -237,8 +239,11 @@ public class AggregatorFactories {
|
|||||||
|
|
||||||
public static class Builder implements Writeable, ToXContentObject {
|
public static class Builder implements Writeable, ToXContentObject {
|
||||||
private final Set<String> names = new HashSet<>();
|
private final Set<String> names = new HashSet<>();
|
||||||
private final List<AggregationBuilder> aggregationBuilders = new ArrayList<>();
|
|
||||||
private final List<PipelineAggregationBuilder> pipelineAggregatorBuilders = new ArrayList<>();
|
// Using LinkedHashSets to preserve the order of insertion, that makes the results
|
||||||
|
// ordered nicely, although technically order does not matter
|
||||||
|
private final Collection<AggregationBuilder> aggregationBuilders = new LinkedHashSet<>();
|
||||||
|
private final Collection<PipelineAggregationBuilder> pipelineAggregatorBuilders = new LinkedHashSet<>();
|
||||||
private boolean skipResolveOrder;
|
private boolean skipResolveOrder;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@ -322,29 +327,32 @@ public class AggregatorFactories {
|
|||||||
parent);
|
parent);
|
||||||
}
|
}
|
||||||
AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregationBuilders.size()];
|
AggregatorFactory<?>[] aggFactories = new AggregatorFactory<?>[aggregationBuilders.size()];
|
||||||
for (int i = 0; i < aggregationBuilders.size(); i++) {
|
|
||||||
aggFactories[i] = aggregationBuilders.get(i).build(context, parent);
|
int i = 0;
|
||||||
|
for (AggregationBuilder agg : aggregationBuilders) {
|
||||||
|
aggFactories[i] = agg.build(context, parent);
|
||||||
|
++i;
|
||||||
}
|
}
|
||||||
return new AggregatorFactories(aggFactories, orderedpipelineAggregators);
|
return new AggregatorFactories(aggFactories, orderedpipelineAggregators);
|
||||||
}
|
}
|
||||||
|
|
||||||
private List<PipelineAggregationBuilder> resolvePipelineAggregatorOrder(
|
private List<PipelineAggregationBuilder> resolvePipelineAggregatorOrder(
|
||||||
List<PipelineAggregationBuilder> pipelineAggregatorBuilders, List<AggregationBuilder> aggBuilders,
|
Collection<PipelineAggregationBuilder> pipelineAggregatorBuilders, Collection<AggregationBuilder> aggregationBuilders,
|
||||||
AggregatorFactory<?> parent) {
|
AggregatorFactory<?> parent) {
|
||||||
Map<String, PipelineAggregationBuilder> pipelineAggregatorBuildersMap = new HashMap<>();
|
Map<String, PipelineAggregationBuilder> pipelineAggregatorBuildersMap = new HashMap<>();
|
||||||
for (PipelineAggregationBuilder builder : pipelineAggregatorBuilders) {
|
for (PipelineAggregationBuilder builder : pipelineAggregatorBuilders) {
|
||||||
pipelineAggregatorBuildersMap.put(builder.getName(), builder);
|
pipelineAggregatorBuildersMap.put(builder.getName(), builder);
|
||||||
}
|
}
|
||||||
Map<String, AggregationBuilder> aggBuildersMap = new HashMap<>();
|
Map<String, AggregationBuilder> aggBuildersMap = new HashMap<>();
|
||||||
for (AggregationBuilder aggBuilder : aggBuilders) {
|
for (AggregationBuilder aggBuilder : aggregationBuilders) {
|
||||||
aggBuildersMap.put(aggBuilder.name, aggBuilder);
|
aggBuildersMap.put(aggBuilder.name, aggBuilder);
|
||||||
}
|
}
|
||||||
List<PipelineAggregationBuilder> orderedPipelineAggregatorrs = new LinkedList<>();
|
List<PipelineAggregationBuilder> orderedPipelineAggregatorrs = new LinkedList<>();
|
||||||
List<PipelineAggregationBuilder> unmarkedBuilders = new ArrayList<>(pipelineAggregatorBuilders);
|
List<PipelineAggregationBuilder> unmarkedBuilders = new ArrayList<>(pipelineAggregatorBuilders);
|
||||||
Set<PipelineAggregationBuilder> temporarilyMarked = new HashSet<>();
|
Collection<PipelineAggregationBuilder> temporarilyMarked = new HashSet<>();
             while (!unmarkedBuilders.isEmpty()) {
                 PipelineAggregationBuilder builder = unmarkedBuilders.get(0);
-                builder.validate(parent, aggBuilders, pipelineAggregatorBuilders);
+                builder.validate(parent, aggregationBuilders, pipelineAggregatorBuilders);
                 resolvePipelineAggregatorOrder(aggBuildersMap, pipelineAggregatorBuildersMap, orderedPipelineAggregators, unmarkedBuilders,
                     temporarilyMarked, builder);
             }
@@ -354,7 +362,7 @@ public class AggregatorFactories {
         private void resolvePipelineAggregatorOrder(Map<String, AggregationBuilder> aggBuildersMap,
                 Map<String, PipelineAggregationBuilder> pipelineAggregatorBuildersMap,
                 List<PipelineAggregationBuilder> orderedPipelineAggregators, List<PipelineAggregationBuilder> unmarkedBuilders,
-                Set<PipelineAggregationBuilder> temporarilyMarked, PipelineAggregationBuilder builder) {
+                Collection<PipelineAggregationBuilder> temporarilyMarked, PipelineAggregationBuilder builder) {
             if (temporarilyMarked.contains(builder)) {
                 throw new IllegalArgumentException("Cyclical dependency found with pipeline aggregator [" + builder.getName() + "]");
             } else if (unmarkedBuilders.contains(builder)) {
@@ -375,7 +383,7 @@ public class AggregatorFactories {
             } else {
                 // Check the non-pipeline sub-aggregator
                 // factories
-                List<AggregationBuilder> subBuilders = aggBuilder.factoriesBuilder.aggregationBuilders;
+                Collection<AggregationBuilder> subBuilders = aggBuilder.factoriesBuilder.aggregationBuilders;
                 boolean foundSubBuilder = false;
                 for (AggregationBuilder subBuilder : subBuilders) {
                     if (aggName.equals(subBuilder.name)) {
@@ -386,7 +394,7 @@ public class AggregatorFactories {
                 }
                 // Check the pipeline sub-aggregator factories
                 if (!foundSubBuilder && (i == bucketsPathElements.size() - 1)) {
-                    List<PipelineAggregationBuilder> subPipelineBuilders = aggBuilder.factoriesBuilder.pipelineAggregatorBuilders;
+                    Collection<PipelineAggregationBuilder> subPipelineBuilders = aggBuilder.factoriesBuilder.pipelineAggregatorBuilders;
                     for (PipelineAggregationBuilder subFactory : subPipelineBuilders) {
                         if (aggName.equals(subFactory.getName())) {
                             foundSubBuilder = true;
@@ -417,12 +425,12 @@ public class AggregatorFactories {
             }
         }

-        public List<AggregationBuilder> getAggregatorFactories() {
-            return Collections.unmodifiableList(aggregationBuilders);
+        public Collection<AggregationBuilder> getAggregatorFactories() {
+            return Collections.unmodifiableCollection(aggregationBuilders);
         }

-        public List<PipelineAggregationBuilder> getPipelineAggregatorFactories() {
-            return Collections.unmodifiableList(pipelineAggregatorBuilders);
+        public Collection<PipelineAggregationBuilder> getPipelineAggregatorFactories() {
+            return Collections.unmodifiableCollection(pipelineAggregatorBuilders);
         }

         public int count() {
@@ -463,6 +471,7 @@ public class AggregatorFactories {
         if (getClass() != obj.getClass())
             return false;
         Builder other = (Builder) obj;

         if (!Objects.equals(aggregationBuilders, other.aggregationBuilders))
             return false;
         if (!Objects.equals(pipelineAggregatorBuilders, other.pipelineAggregatorBuilders))
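The hunks above touch a depth-first topological sort: builders still in unmarkedBuilders are unvisited, builders in temporarilyMarked sit on the current recursion path, and meeting a temporarily marked builder again is what raises the "Cyclical dependency" error. A minimal self-contained sketch of that marking scheme (hypothetical names, not the AggregatorFactories code itself; it assumes every node appears as a key of the deps map):

    import java.util.ArrayList;
    import java.util.Collections;
    import java.util.HashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    // Sketch of the marking scheme: "unmarked" holds unvisited nodes,
    // "temporarilyMarked" holds the current DFS path, and meeting a
    // temporarily marked node again means a dependency cycle.
    class TopoSort<T> {
        private final Map<T, List<T>> deps; // node -> nodes it must come after

        TopoSort(Map<T, List<T>> deps) {
            this.deps = deps;
        }

        List<T> order() {
            List<T> ordered = new ArrayList<>();
            List<T> unmarked = new ArrayList<>(deps.keySet());
            Set<T> temporarilyMarked = new HashSet<>();
            while (!unmarked.isEmpty()) {
                visit(unmarked.get(0), unmarked, temporarilyMarked, ordered);
            }
            return ordered; // dependencies come before their dependents
        }

        private void visit(T node, List<T> unmarked, Set<T> temporarilyMarked, List<T> ordered) {
            if (temporarilyMarked.contains(node)) {
                throw new IllegalArgumentException("Cyclical dependency found at [" + node + "]");
            }
            if (unmarked.contains(node)) {
                temporarilyMarked.add(node);
                for (T dep : deps.getOrDefault(node, Collections.emptyList())) {
                    visit(dep, unmarked, temporarilyMarked, ordered);
                }
                temporarilyMarked.remove(node);
                unmarked.remove(node);
                ordered.add(node);
            }
        }
    }

With deps = {a: [], b: [a], c: [b]} the order comes out [a, b, c]; making a depend on c instead trips the IllegalArgumentException, mirroring the pipeline aggregator failure path above.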
@@ -25,7 +25,7 @@ import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.io.IOException;
-import java.util.List;
+import java.util.Collection;
 import java.util.Map;

 /**
@@ -68,8 +68,8 @@ public abstract class PipelineAggregationBuilder implements NamedWriteable, Base
      * Internal: Validates the state of this factory (makes sure the factory is properly
      * configured)
      */
-    protected abstract void validate(AggregatorFactory<?> parent, List<AggregationBuilder> factories,
-            List<PipelineAggregationBuilder> pipelineAggregatorFactories);
+    protected abstract void validate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggregationBuilders,
+            Collection<PipelineAggregationBuilder> pipelineAggregatorBuilders);

     /**
      * Creates the pipeline aggregator
@@ -28,7 +28,7 @@ import org.elasticsearch.search.aggregations.PipelineAggregationBuilder;

 import java.io.IOException;
 import java.util.Arrays;
-import java.util.List;
+import java.util.Collection;
 import java.util.Map;
 import java.util.Objects;

@@ -81,8 +81,8 @@ public abstract class AbstractPipelineAggregationBuilder<PAB extends AbstractPip
      * configured)
      */
     @Override
-    public final void validate(AggregatorFactory<?> parent, List<AggregationBuilder> factories,
-            List<PipelineAggregationBuilder> pipelineAggregatorFactories) {
+    public final void validate(AggregatorFactory<?> parent, Collection<AggregationBuilder> factories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
         doValidate(parent, factories, pipelineAggregatorFactories);
     }

@@ -99,8 +99,8 @@ public abstract class AbstractPipelineAggregationBuilder<PAB extends AbstractPip
         return aggregator;
     }

-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> factories,
-            List<PipelineAggregationBuilder> pipelineAggregatorFactories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> factories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
     }

     @SuppressWarnings("unchecked")
@@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.io.IOException;
-import java.util.List;
+import java.util.Collection;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Optional;
@@ -109,8 +109,8 @@ public abstract class BucketMetricsPipelineAggregationBuilder<AF extends BucketM
     protected abstract PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException;

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggBuilders,
-            List<PipelineAggregationBuilder> pipelineAggregatorFactories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggBuilders,
+            Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
         if (bucketsPaths.length != 1) {
             throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
                 + " must contain a single entry for aggregation [" + name + "]");
@@ -35,7 +35,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetric

 import java.io.IOException;
 import java.util.Arrays;
-import java.util.List;
+import java.util.Collection;
 import java.util.Map;
 import java.util.Objects;

@@ -95,8 +95,8 @@ public class PercentilesBucketPipelineAggregationBuilder
     }

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggFactories,
-            List<PipelineAggregationBuilder> pipelineAggregatorFactories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggFactories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
         super.doValidate(parent, aggFactories, pipelineAggregatorFactories);

         for (Double p : percents) {
@@ -29,7 +29,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregationBuilder;

 import java.io.IOException;
-import java.util.List;
+import java.util.Collection;
 import java.util.Map;
 import java.util.Objects;

@@ -82,8 +82,8 @@ public class ExtendedStatsBucketPipelineAggregationBuilder
     }

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggBuilders,
-            List<PipelineAggregationBuilder> pipelineAggregatorFactories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggBuilders,
+            Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
         super.doValidate(parent, aggBuilders, pipelineAggregatorFactories);

         if (sigma < 0.0 ) {
@@ -38,6 +38,7 @@ import org.elasticsearch.search.sort.SortBuilder;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Locale;
@@ -145,8 +146,8 @@ public class BucketSortPipelineAggregationBuilder extends AbstractPipelineAggreg
     }

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggFactories,
-            List<PipelineAggregationBuilder> pipelineAggregatoractories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggFactories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
         if (sorts.isEmpty() && size == null && from == 0) {
             throw new IllegalStateException("[" + name + "] is configured to perform nothing. Please set either of "
                 + Arrays.asList(SearchSourceBuilder.SORT_FIELD.getPreferredName(), SIZE.getPreferredName(), FROM.getPreferredName())
@@ -36,6 +36,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetric

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -97,8 +98,8 @@ public class CumulativeSumPipelineAggregationBuilder extends AbstractPipelineAgg
     }

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggFactories,
-            List<PipelineAggregationBuilder> pipelineAggregatorFactories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggFactories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatorFactories) {
         if (bucketsPaths.length != 1) {
             throw new IllegalStateException(BUCKETS_PATH.getPreferredName()
                 + " must contain a single entry for aggregation [" + name + "]");
@@ -42,6 +42,7 @@ import org.joda.time.DateTimeZone;

 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -156,8 +157,8 @@ public class DerivativePipelineAggregationBuilder extends AbstractPipelineAggreg
     }

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggFactories,
-            List<PipelineAggregationBuilder> pipelineAggregatoractories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggFactories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
         if (bucketsPaths.length != 1) {
             throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
                 + " must contain a single entry for aggregation [" + name + "]");
@@ -44,6 +44,7 @@ import org.elasticsearch.search.aggregations.pipeline.movavg.models.SimpleModel;
 import java.io.IOException;
 import java.text.ParseException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
@@ -260,8 +261,8 @@ public class MovAvgPipelineAggregationBuilder extends AbstractPipelineAggregatio
     }

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggFactories,
-            List<PipelineAggregationBuilder> pipelineAggregatoractories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggFactories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
         if (minimize != null && minimize && !model.canBeMinimized()) {
             // If the user asks to minimize, but this model doesn't support
             // it, throw exception
@@ -39,7 +39,7 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;

 import java.io.IOException;
-import java.util.List;
+import java.util.Collection;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
@@ -173,8 +173,8 @@ public class MovFnPipelineAggregationBuilder extends AbstractPipelineAggregation
     }

     @Override
-    public void doValidate(AggregatorFactory<?> parent, List<AggregationBuilder> aggFactories,
-            List<PipelineAggregationBuilder> pipelineAggregatoractories) {
+    public void doValidate(AggregatorFactory<?> parent, Collection<AggregationBuilder> aggFactories,
+            Collection<PipelineAggregationBuilder> pipelineAggregatoractories) {
         if (window <= 0) {
             throw new IllegalArgumentException("[" + WINDOW.getPreferredName() + "] must be a positive, non-zero integer.");
         }
@@ -132,7 +132,7 @@ public class UpdateRequestTests extends ESTestCase {
             return null;
         });
         scripts.put("return", vars -> null);
-        final MockScriptEngine engine = new MockScriptEngine("mock", scripts);
+        final MockScriptEngine engine = new MockScriptEngine("mock", scripts, Collections.emptyMap());
         Map<String, ScriptEngine> engines = Collections.singletonMap(engine.getType(), engine);
         ScriptService scriptService = new ScriptService(baseSettings, engines, ScriptModule.CORE_CONTEXTS);
         final Settings settings = settings(Version.CURRENT).build();
@@ -71,6 +71,8 @@ public class JavaJodaTimeDuellingTests extends ESTestCase {

     public void testDuellingFormatsValidParsing() {
         assertSameDate("1522332219", "epoch_second");
+        assertSameDate("1522332219.", "epoch_second");
+        assertSameDate("1522332219.0", "epoch_second");
         assertSameDate("0", "epoch_second");
         assertSameDate("1", "epoch_second");
         assertSameDate("-1", "epoch_second");
@@ -21,6 +21,7 @@ package org.elasticsearch.common.time;

 import org.elasticsearch.test.ESTestCase;

+import java.time.Instant;
 import java.time.ZoneId;
 import java.time.ZonedDateTime;
 import java.time.format.DateTimeParseException;
@@ -56,6 +57,42 @@ public class DateFormattersTests extends ESTestCase {
         assertSameFormat(formatter, 1);
     }

+    // this is not in the duelling tests, because the epoch second parser in joda time drops the milliseconds after the comma
+    // but is able to parse the rest
+    // as this feature is supported it also makes sense to make it exact
+    public void testEpochSecondParser() {
+        DateFormatter formatter = DateFormatters.forPattern("epoch_second");
+
+        assertThat(Instant.from(formatter.parse("1234.567")).toEpochMilli(), is(1234567L));
+        assertThat(Instant.from(formatter.parse("1234.")).getNano(), is(0));
+        assertThat(Instant.from(formatter.parse("1234.")).getEpochSecond(), is(1234L));
+        assertThat(Instant.from(formatter.parse("1234.1")).getNano(), is(100_000_000));
+        assertThat(Instant.from(formatter.parse("1234.12")).getNano(), is(120_000_000));
+        assertThat(Instant.from(formatter.parse("1234.123")).getNano(), is(123_000_000));
+        assertThat(Instant.from(formatter.parse("1234.1234")).getNano(), is(123_400_000));
+        assertThat(Instant.from(formatter.parse("1234.12345")).getNano(), is(123_450_000));
+        assertThat(Instant.from(formatter.parse("1234.123456")).getNano(), is(123_456_000));
+        assertThat(Instant.from(formatter.parse("1234.1234567")).getNano(), is(123_456_700));
+        assertThat(Instant.from(formatter.parse("1234.12345678")).getNano(), is(123_456_780));
+        assertThat(Instant.from(formatter.parse("1234.123456789")).getNano(), is(123_456_789));
+        DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.1234567890"));
+        assertThat(e.getMessage(), is("too much granularity after dot [1234.1234567890]"));
+        e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.123456789013221"));
+        assertThat(e.getMessage(), is("too much granularity after dot [1234.123456789013221]"));
+        e = expectThrows(DateTimeParseException.class, () -> formatter.parse("abc"));
+        assertThat(e.getMessage(), is("invalid number [abc]"));
+        e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc"));
+        assertThat(e.getMessage(), is("invalid number [1234.abc]"));
+
+        // different zone, should still yield the same output, as epoch is time zone independent
+        ZoneId zoneId = randomZone();
+        DateFormatter zonedFormatter = formatter.withZone(zoneId);
+
+        assertThatSameDateTime(formatter, zonedFormatter, randomLongBetween(-100_000_000, 100_000_000));
+        assertSameFormat(formatter, randomLongBetween(-100_000_000, 100_000_000));
+        assertThat(formatter.format(Instant.ofEpochSecond(1234, 567_000_000)), is("1234.567"));
+    }
+
     public void testEpochMilliParsersWithDifferentFormatters() {
         DateFormatter formatter = DateFormatters.forPattern("strict_date_optional_time||epoch_millis");
         TemporalAccessor accessor = formatter.parse("123");
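The new testEpochSecondParser pins down the fractional part: digits after the dot are a decimal fraction of a second, padded out to nanoseconds, and more than nine digits cannot be represented, hence the "too much granularity after dot" errors. A standalone sketch of that arithmetic in plain java.time (a hypothetical helper, not the production DateFormatters parser):

    import java.time.Instant;

    // Hypothetical helper: turns "seconds[.fraction]" into an Instant,
    // padding the fraction to nine digits (nanoseconds) and rejecting
    // anything finer than nanosecond precision.
    final class EpochSeconds {
        static Instant parse(String value) {
            int dot = value.indexOf('.');
            if (dot < 0) {
                return Instant.ofEpochSecond(Long.parseLong(value));
            }
            long seconds = Long.parseLong(value.substring(0, dot));
            String fraction = value.substring(dot + 1); // may be empty, as in "1234."
            if (fraction.length() > 9) {
                throw new IllegalArgumentException("too much granularity after dot [" + value + "]");
            }
            long nanos = fraction.isEmpty() ? 0 : Long.parseLong(fraction);
            for (int i = fraction.length(); i < 9; i++) {
                nanos *= 10; // "1" -> 100_000_000, "12" -> 120_000_000, ...
            }
            return Instant.ofEpochSecond(seconds, nanos);
        }

        public static void main(String[] args) {
            System.out.println(parse("1234.567").toEpochMilli()); // 1234567
            System.out.println(parse("1234.1").getNano());        // 100000000
        }
    }

Here parse("1234.1").getNano() yields 100_000_000, matching the assertions in the test above.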
@@ -19,6 +19,14 @@

 package org.elasticsearch.index.mapper;

+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.RegexpQuery;
+import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.util.BytesRef;
+
 public class IgnoredFieldTypeTests extends FieldTypeTestCase {

     @Override
@@ -26,4 +34,30 @@ public class IgnoredFieldTypeTests extends FieldTypeTestCase {
         return new IgnoredFieldMapper.IgnoredFieldType();
     }

+    public void testPrefixQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new PrefixQuery(new Term("field", new BytesRef("foo*")));
+        assertEquals(expected, ft.prefixQuery("foo*", null, null));
+    }
+
+    public void testRegexpQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new RegexpQuery(new Term("field", new BytesRef("foo?")));
+        assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, null));
+    }
+
+    public void testWildcardQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new WildcardQuery(new Term("field", new BytesRef("foo*")));
+        assertEquals(expected, ft.wildcardQuery("foo*", null, null));
+    }
 }
@@ -18,12 +18,56 @@
  */
 package org.elasticsearch.index.mapper;

-import org.elasticsearch.index.mapper.IndexFieldMapper;
-import org.elasticsearch.index.mapper.MappedFieldType;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.MatchNoDocsQuery;
+import org.elasticsearch.index.Index;
+import org.elasticsearch.index.query.QueryShardContext;
+
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;

 public class IndexFieldTypeTests extends FieldTypeTestCase {

     @Override
     protected MappedFieldType createDefaultFieldType() {
         return new IndexFieldMapper.IndexFieldType();
     }

+    public void testPrefixQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        assertEquals(new MatchAllDocsQuery(), ft.prefixQuery("ind", null, createContext()));
+        assertEquals(new MatchNoDocsQuery(), ft.prefixQuery("other_ind", null, createContext()));
+    }
+
+    public void testRegexpQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        assertEquals(new MatchAllDocsQuery(), ft.regexpQuery("ind.x", 0, 10, null, createContext()));
+        assertEquals(new MatchNoDocsQuery(), ft.regexpQuery("ind?x", 0, 10, null, createContext()));
+    }
+
+    public void testWildcardQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        assertEquals(new MatchAllDocsQuery(), ft.wildcardQuery("ind*x", null, createContext()));
+        assertEquals(new MatchNoDocsQuery(), ft.wildcardQuery("other_ind*x", null, createContext()));
+    }
+
+    private QueryShardContext createContext() {
+        QueryShardContext context = mock(QueryShardContext.class);
+
+        Index index = new Index("index", "123");
+        when(context.getFullyQualifiedIndex()).thenReturn(index);
+        when(context.index()).thenReturn(index);
+
+        return context;
+    }
 }
@@ -18,12 +18,44 @@
  */
 package org.elasticsearch.index.mapper;

-import org.elasticsearch.index.mapper.MappedFieldType;
-import org.elasticsearch.index.mapper.RoutingFieldMapper;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.PrefixQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.RegexpQuery;
+import org.apache.lucene.search.WildcardQuery;
+import org.apache.lucene.util.BytesRef;

 public class RoutingFieldTypeTests extends FieldTypeTestCase {
     @Override
     protected MappedFieldType createDefaultFieldType() {
         return new RoutingFieldMapper.RoutingFieldType();
     }

+    public void testPrefixQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new PrefixQuery(new Term("field", new BytesRef("foo*")));
+        assertEquals(expected, ft.prefixQuery("foo*", null, null));
+    }
+
+    public void testRegexpQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new RegexpQuery(new Term("field", new BytesRef("foo?")));
+        assertEquals(expected, ft.regexpQuery("foo?", 0, 10, null, null));
+    }
+
+    public void testWildcardQuery() {
+        MappedFieldType ft = createDefaultFieldType();
+        ft.setName("field");
+        ft.setIndexOptions(IndexOptions.DOCS);
+
+        Query expected = new WildcardQuery(new Term("field", new BytesRef("foo*")));
+        assertEquals(expected, ft.wildcardQuery("foo*", null, null));
+    }
 }
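For IndexFieldTypeTests, the interesting property is that `_index` holds the same value for every document in a shard, so prefix, regexp, and wildcard queries can be decided once against the index name and the whole clause collapses to match-all or match-none. A hedged sketch of that shortcut for the wildcard case (hypothetical helper, plain Lucene plus java.util.regex):

    import java.util.regex.Pattern;

    import org.apache.lucene.search.MatchAllDocsQuery;
    import org.apache.lucene.search.MatchNoDocsQuery;
    import org.apache.lucene.search.Query;

    // Hypothetical helper: every document in a shard carries the same _index
    // value, so a wildcard query against it can be answered up front without
    // ever reading the index.
    final class IndexNameQueries {
        static Query wildcardQuery(String indexName, String pattern) {
            // translate the simple wildcard syntax (* and ?) into a regex
            String regex = ("\\Q" + pattern + "\\E")
                .replace("*", "\\E.*\\Q")
                .replace("?", "\\E.\\Q");
            return Pattern.matches(regex, indexName)
                ? new MatchAllDocsQuery()  // e.g. "ind*x" against "index"
                : new MatchNoDocsQuery();  // e.g. "other_ind*x" against "index"
        }
    }

Here wildcardQuery("index", "ind*x") comes back as MatchAllDocsQuery and wildcardQuery("index", "other_ind*x") as MatchNoDocsQuery, matching the assertions in the test.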
@@ -19,6 +19,7 @@

 package org.elasticsearch.index.mapper;

+import org.apache.lucene.analysis.MockSynonymAnalyzer;
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
 import org.apache.lucene.document.FieldType;
@@ -55,6 +56,7 @@ import org.elasticsearch.index.mapper.MapperService.MergeReason;
 import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
 import org.elasticsearch.index.query.MatchPhraseQueryBuilder;
 import org.elasticsearch.index.query.QueryShardContext;
+import org.elasticsearch.index.search.MatchQuery;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.test.ESSingleNodeTestCase;
@@ -82,10 +84,6 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
     @Before
     public void setup() {
         Settings settings = Settings.builder()
-            .put("index.analysis.filter.mySynonyms.type", "synonym")
-            .putList("index.analysis.filter.mySynonyms.synonyms", Collections.singletonList("car, auto"))
-            .put("index.analysis.analyzer.synonym.tokenizer", "standard")
-            .put("index.analysis.analyzer.synonym.filter", "mySynonyms")
             // Stop filter remains in server as it is part of lucene-core
             .put("index.analysis.analyzer.my_stop_analyzer.tokenizer", "standard")
             .put("index.analysis.analyzer.my_stop_analyzer.filter", "stop")
@@ -734,7 +732,7 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
                 .endObject()
                 .startObject("synfield")
                     .field("type", "text")
-                    .field("analyzer", "synonym")
+                    .field("analyzer", "standard") // will be replaced with MockSynonymAnalyzer
                     .field("index_phrases", true)
                 .endObject()
             .endObject()
@@ -761,11 +759,13 @@ public class TextFieldMapperTests extends ESSingleNodeTestCase {
         assertThat(q5,
             is(new PhraseQuery.Builder().add(new Term("field", "sparkle")).add(new Term("field", "stopword"), 2).build()));

-        Query q6 = new MatchPhraseQueryBuilder("synfield", "motor car").toQuery(queryShardContext);
+        MatchQuery matchQuery = new MatchQuery(queryShardContext);
+        matchQuery.setAnalyzer(new MockSynonymAnalyzer());
+        Query q6 = matchQuery.parse(MatchQuery.Type.PHRASE, "synfield", "motor dogs");
         assertThat(q6, is(new MultiPhraseQuery.Builder()
             .add(new Term[]{
-                new Term("synfield._index_phrase", "motor car"),
-                new Term("synfield._index_phrase", "motor auto")})
+                new Term("synfield._index_phrase", "motor dogs"),
+                new Term("synfield._index_phrase", "motor dog")})
             .build()));

         ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
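The TextFieldMapperTests change swaps the index-level synonym analyzer for Lucene's test-framework MockSynonymAnalyzer (the retained comment notes that only the stop filter still lives in server). As the expected MultiPhraseQuery implies, the mock injects "dog" at the same token position as "dogs". A small sketch that prints the token stream, assuming lucene-test-framework is on the classpath:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.MockSynonymAnalyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;
    import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;

    // Sketch: print what MockSynonymAnalyzer emits for "motor dogs". A token
    // with positionIncrement 0 sits at the same position as the previous one,
    // which is how the synonym shows up in the phrase queries above.
    public final class ShowSynonyms {
        public static void main(String[] args) throws Exception {
            Analyzer analyzer = new MockSynonymAnalyzer();
            try (TokenStream ts = analyzer.tokenStream("synfield", "motor dogs")) {
                CharTermAttribute term = ts.addAttribute(CharTermAttribute.class);
                PositionIncrementAttribute posInc = ts.addAttribute(PositionIncrementAttribute.class);
                ts.reset();
                while (ts.incrementToken()) {
                    System.out.println(term + " (posInc=" + posInc.getPositionIncrement() + ")");
                }
                ts.end();
            }
        }
    }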
@@ -1,220 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.index.search;
-
-import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
-import org.elasticsearch.action.index.IndexRequestBuilder;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentFactory;
-import org.elasticsearch.index.query.Operator;
-import org.elasticsearch.index.query.QueryBuilders;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.junit.Before;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.concurrent.ExecutionException;
-
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
-
-public class MatchQueryIT extends ESIntegTestCase {
-    private static final String INDEX = "test";
-
-    /**
-     * Test setup.
-     */
-    @Before
-    public void setUp() throws Exception {
-        super.setUp();
-        CreateIndexRequestBuilder builder = prepareCreate(INDEX).setSettings(
-            Settings.builder()
-                .put(indexSettings())
-                .put("index.analysis.filter.syns.type", "synonym")
-                .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz")
-                .put("index.analysis.analyzer.lower_syns.type", "custom")
-                .put("index.analysis.analyzer.lower_syns.tokenizer", "standard")
-                .putList("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns")
-                .put("index.analysis.filter.graphsyns.type", "synonym_graph")
-                .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
-                .put("index.analysis.analyzer.lower_graphsyns.type", "custom")
-                .put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
-                .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
-        );
-
-        assertAcked(builder.addMapping(INDEX, createMapping()));
-        ensureGreen();
-    }
-
-    private List<IndexRequestBuilder> getDocs() {
-        List<IndexRequestBuilder> builders = new ArrayList<>();
-        builders.add(client().prepareIndex("test", "test", "1").setSource("field", "say wtf happened foo"));
-        builders.add(client().prepareIndex("test", "test", "2").setSource("field", "bar baz what the fudge man"));
-        builders.add(client().prepareIndex("test", "test", "3").setSource("field", "wtf"));
-        builders.add(client().prepareIndex("test", "test", "4").setSource("field", "what is the name for fudge"));
-        builders.add(client().prepareIndex("test", "test", "5").setSource("field", "bar two three"));
-        builders.add(client().prepareIndex("test", "test", "6").setSource("field", "bar baz two three"));
-
-        return builders;
-    }
-
-    /**
-     * Setup the index mappings for the test index.
-     *
-     * @return the json builder with the index mappings
-     * @throws IOException on error creating mapping json
-     */
-    private XContentBuilder createMapping() throws IOException {
-        return XContentFactory.jsonBuilder()
-            .startObject()
-                .startObject(INDEX)
-                    .startObject("properties")
-                        .startObject("field")
-                            .field("type", "text")
-                        .endObject()
-                    .endObject()
-                .endObject()
-            .endObject();
-    }
-
-    public void testSimpleMultiTermPhrase() throws ExecutionException, InterruptedException {
-        indexRandom(true, false, getDocs());
-
-        // first search using regular synonym field using phrase
-        SearchResponse searchResponse = client().prepareSearch(INDEX)
-            .setQuery(QueryBuilders.matchPhraseQuery("field", "foo two three").analyzer("lower_syns")).get();
-
-        // because foo -> "bar baz" where "foo" and "bar" at position 0, "baz" and "two" at position 1.
-        // "bar two three", "bar baz three", "foo two three", "foo baz three"
-        assertHitCount(searchResponse, 1L);
-        assertSearchHits(searchResponse, "5"); // we should not match this but we do
-
-        // same query using graph should find correct result
-        searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchPhraseQuery("field", "foo two three")
-            .analyzer("lower_graphsyns")).get();
-
-        assertHitCount(searchResponse, 1L);
-        assertSearchHits(searchResponse, "6");
-    }
-
-    public void testSimpleMultiTermAnd() throws ExecutionException, InterruptedException {
-        indexRandom(true, false, getDocs());
-
-        // first search using regular synonym field using phrase
-        SearchResponse searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "say what the fudge")
-            .operator(Operator.AND).analyzer("lower_syns")).get();
-
-        // Old synonyms work fine in that case, but it is coincidental
-        assertHitCount(searchResponse, 1L);
-        assertSearchHits(searchResponse, "1");
-
-        // same query using graph should find correct result
-        searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "say what the fudge")
-            .operator(Operator.AND).analyzer("lower_graphsyns")).get();
-
-        assertHitCount(searchResponse, 1L);
-        assertSearchHits(searchResponse, "1");
-    }
-
-    public void testMinShouldMatch() throws ExecutionException, InterruptedException {
-        indexRandom(true, false, getDocs());
-
-        // no min should match
-        SearchResponse searchResponse = client().prepareSearch(INDEX)
-            .setQuery(
-                QueryBuilders.matchQuery("field", "three what the fudge foo")
-                    .operator(Operator.OR).analyzer("lower_graphsyns").autoGenerateSynonymsPhraseQuery(false)
-            )
-            .get();
-
-        assertHitCount(searchResponse, 6L);
-        assertSearchHits(searchResponse, "1", "2", "3", "4", "5", "6");
-
-        // same query, with min_should_match of 2
-        searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchQuery("field", "three what the fudge foo")
-            .operator(Operator.OR).analyzer("lower_graphsyns").minimumShouldMatch("80%")).get();
-
-        // three wtf foo = 2 terms, match #1
-        // three wtf bar baz = 3 terms, match #6
-        // three what the fudge foo = 4 terms, no match
-        // three what the fudge bar baz = 4 terms, match #2
-        assertHitCount(searchResponse, 3L);
-        assertSearchHits(searchResponse, "1", "2", "6");
-    }
-
-    public void testMultiTermsSynonymsPhrase() throws ExecutionException, InterruptedException {
-        List<IndexRequestBuilder> builders = getDocs();
-        indexRandom(true, false, builders);
-        SearchResponse searchResponse = client().prepareSearch(INDEX)
-            .setQuery(
-                QueryBuilders.matchQuery("field", "wtf")
-                    .analyzer("lower_graphsyns")
-                    .operator(Operator.AND))
-            .get();
-        assertHitCount(searchResponse, 3L);
-        assertSearchHits(searchResponse, "1", "2", "3");
-    }
-
-    public void testPhrasePrefix() throws ExecutionException, InterruptedException {
-        List<IndexRequestBuilder> builders = getDocs();
-        builders.add(client().prepareIndex("test", "test", "7").setSource("field", "WTFD!"));
-        builders.add(client().prepareIndex("test", "test", "8").setSource("field", "Weird Al's WHAT THE FUDGESICLE"));
-        indexRandom(true, false, builders);
-
-        SearchResponse searchResponse = client().prepareSearch(INDEX).setQuery(QueryBuilders.matchPhrasePrefixQuery("field", "wtf")
-            .analyzer("lower_graphsyns")).get();
-
-        assertHitCount(searchResponse, 5L);
-        assertSearchHits(searchResponse, "1", "2", "3", "7", "8");
-    }
-
-    public void testCommonTerms() throws ExecutionException, InterruptedException {
-        String route = "commonTermsTest";
-        List<IndexRequestBuilder> builders = getDocs();
-        for (IndexRequestBuilder indexRequet : builders) {
-            // route all docs to same shard for this test
-            indexRequet.setRouting(route);
-        }
-        indexRandom(true, false, builders);
-
-        // do a search with no cutoff frequency to show which docs should match
-        SearchResponse searchResponse = client().prepareSearch(INDEX)
-            .setRouting(route)
-            .setQuery(QueryBuilders.matchQuery("field", "bar three happened")
-                .operator(Operator.OR)).get();
-
-        assertHitCount(searchResponse, 4L);
-        assertSearchHits(searchResponse, "1", "2", "5", "6");
-
-        // do same search with cutoff and see less documents match
-        // in this case, essentially everything but "happened" gets excluded
-        searchResponse = client().prepareSearch(INDEX)
-            .setRouting(route)
-            .setQuery(QueryBuilders.matchQuery("field", "bar three happened")
-                .operator(Operator.OR).cutoffFrequency(1f)).get();
-
-        assertHitCount(searchResponse, 1L);
-        assertSearchHits(searchResponse, "1");
-    }
-}
@@ -73,11 +73,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {

     @Before
     public void setup() throws IOException {
-        Settings settings = Settings.builder()
-            .put("index.analysis.filter.syns.type","synonym")
-            .putList("index.analysis.filter.syns.synonyms","quick,fast")
-            .put("index.analysis.analyzer.syns.tokenizer","standard")
-            .put("index.analysis.analyzer.syns.filter","syns").build();
+        Settings settings = Settings.builder().build();
         IndexService indexService = createIndex("test", settings);
         MapperService mapperService = indexService.mapperService();
         String mapping = "{\n" +
@@ -87,11 +83,11 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
         " \"properties\":{\n" +
         " \"first\": {\n" +
         " \"type\":\"text\",\n" +
-        " \"analyzer\":\"syns\"\n" +
+        " \"analyzer\":\"standard\"\n" +
         " }," +
         " \"last\": {\n" +
         " \"type\":\"text\",\n" +
-        " \"analyzer\":\"syns\"\n" +
+        " \"analyzer\":\"standard\"\n" +
         " }" +
         " }" +
         " }\n" +
@@ -221,25 +217,27 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
         QueryShardContext queryShardContext = indexService.newQueryShardContext(
             randomInt(20), null, () -> { throw new UnsupportedOperationException(); }, null);

+        MultiMatchQuery parser = new MultiMatchQuery(queryShardContext);
+        parser.setAnalyzer(new MockSynonymAnalyzer());
+        Map<String, Float> fieldNames = new HashMap<>();
+        fieldNames.put("name.first", 1.0f);
+
         // check that synonym query is used for a single field
-        Query parsedQuery =
-            multiMatchQuery("quick").field("name.first")
-                .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
+        Query parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null);
         Term[] terms = new Term[2];
-        terms[0] = new Term("name.first", "quick");
-        terms[1] = new Term("name.first", "fast");
+        terms[0] = new Term("name.first", "dog");
+        terms[1] = new Term("name.first", "dogs");
         Query expectedQuery = new SynonymQuery(terms);
         assertThat(parsedQuery, equalTo(expectedQuery));

         // check that blended term query is used for multiple fields
-        parsedQuery =
-            multiMatchQuery("quick").field("name.first").field("name.last")
-                .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS).toQuery(queryShardContext);
+        fieldNames.put("name.last", 1.0f);
+        parsedQuery = parser.parse(MultiMatchQueryBuilder.Type.CROSS_FIELDS, fieldNames, "dogs", null);
         terms = new Term[4];
-        terms[0] = new Term("name.first", "quick");
-        terms[1] = new Term("name.first", "fast");
-        terms[2] = new Term("name.last", "quick");
-        terms[3] = new Term("name.last", "fast");
+        terms[0] = new Term("name.first", "dog");
+        terms[1] = new Term("name.first", "dogs");
+        terms[2] = new Term("name.last", "dog");
+        terms[3] = new Term("name.last", "dogs");
         float[] boosts = new float[4];
         Arrays.fill(boosts, 1.0f);
         expectedQuery = BlendedTermQuery.dismaxBlendedQuery(terms, boosts, 1.0f);
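The rewritten MultiMatchQueryTests documents the two shapes a cross_fields query takes: on a single field, analyzer variants of one term become a SynonymQuery; across several fields, the per-field terms are blended with a dis-max (BlendedTermQuery.dismaxBlendedQuery above). A sketch of the single-field shape, assuming the Lucene 7-era varargs SynonymQuery constructor and a hypothetical field name:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.SynonymQuery;

    // Sketch of the single-field cross_fields shape: analyzer variants of one
    // term ("dog"/"dogs") are scored as a single pseudo-term, so the rarer
    // variant does not get an inflated IDF.
    public final class CrossFieldsShapes {
        public static void main(String[] args) {
            Query singleField = new SynonymQuery(
                new Term("name.first", "dog"),
                new Term("name.first", "dogs"));
            System.out.println(singleField); // roughly Synonym(name.first:dog name.first:dogs)
        }
    }

The multi-field case in the test builds the same four terms per field and hands them to the dis-max blend with uniform boosts, as shown in the hunk above.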
@@ -121,54 +121,6 @@ public class AnalyzeActionIT extends ESIntegTestCase {
         assertThat(analyzeResponse.getTokens().get(0).getPositionLength(), equalTo(1));
     }

-    public void testAnalyzeWithNonDefaultPostionLength() throws Exception {
-        assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
-            .setSettings(Settings.builder().put(indexSettings())
-                .put("index.analysis.filter.syns.type", "synonym")
-                .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
-                .put("index.analysis.analyzer.custom_syns.tokenizer", "standard")
-                .putList("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
-        ensureGreen();
-
-        AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("say what the fudge").setIndex("test").setAnalyzer("custom_syns").get();
-        assertThat(analyzeResponse.getTokens().size(), equalTo(5));
-
-        AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0);
-        assertThat(token.getTerm(), equalTo("say"));
-        assertThat(token.getPosition(), equalTo(0));
-        assertThat(token.getStartOffset(), equalTo(0));
-        assertThat(token.getEndOffset(), equalTo(3));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.getTokens().get(1);
-        assertThat(token.getTerm(), equalTo("what"));
-        assertThat(token.getPosition(), equalTo(1));
-        assertThat(token.getStartOffset(), equalTo(4));
-        assertThat(token.getEndOffset(), equalTo(8));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.getTokens().get(2);
-        assertThat(token.getTerm(), equalTo("wtf"));
-        assertThat(token.getPosition(), equalTo(1));
-        assertThat(token.getStartOffset(), equalTo(4));
-        assertThat(token.getEndOffset(), equalTo(18));
-        assertThat(token.getPositionLength(), equalTo(3));
-
-        token = analyzeResponse.getTokens().get(3);
-        assertThat(token.getTerm(), equalTo("the"));
-        assertThat(token.getPosition(), equalTo(2));
-        assertThat(token.getStartOffset(), equalTo(9));
-        assertThat(token.getEndOffset(), equalTo(12));
-        assertThat(token.getPositionLength(), equalTo(1));
-
-        token = analyzeResponse.getTokens().get(4);
-        assertThat(token.getTerm(), equalTo("fudge"));
-        assertThat(token.getPosition(), equalTo(3));
-        assertThat(token.getStartOffset(), equalTo(13));
-        assertThat(token.getEndOffset(), equalTo(18));
-        assertThat(token.getPositionLength(), equalTo(1));
-    }
-
     public void testAnalyzerWithFieldOrTypeTests() throws Exception {
         assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
         ensureGreen();
@@ -246,7 +246,6 @@ public class IndexRecoveryIT extends ESIntegTestCase {
         validateIndexRecoveryState(nodeBRecoveryState.getIndex());
     }

-    @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/32686")
     @TestLogging(
         "_root:DEBUG,"
             + "org.elasticsearch.cluster.service:TRACE,"
@@ -19,13 +19,6 @@

 package org.elasticsearch.ingest;

-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.CompletableFuture;
-import java.util.function.Consumer;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.script.MockScriptEngine;
 import org.elasticsearch.script.Script;
@@ -34,6 +27,14 @@ import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.test.ESTestCase;

+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CompletableFuture;
+import java.util.function.Consumer;
+
 import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.Matchers.instanceOf;
 import static org.hamcrest.Matchers.not;
@@ -52,7 +53,8 @@ public class ConditionalProcessorTests extends ESTestCase {
                 Script.DEFAULT_SCRIPT_LANG,
                 Collections.singletonMap(
                     scriptName, ctx -> trueValue.equals(ctx.get(conditionalField))
-                )
+                ),
+                Collections.emptyMap()
             )
         ),
         new HashMap<>(ScriptModule.CORE_CONTEXTS)
@@ -120,7 +122,8 @@ public class ConditionalProcessorTests extends ESTestCase {
                     }
                     return false;
                 }
-            )
+            ),
+            Collections.emptyMap()
         )
     ),
     new HashMap<>(ScriptModule.CORE_CONTEXTS)
|
@ -19,16 +19,6 @@
|
|||||||
|
|
||||||
package org.elasticsearch.ingest;
|
package org.elasticsearch.ingest;
|
||||||
|
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.Comparator;
|
|
||||||
import java.util.HashMap;
|
|
||||||
import java.util.List;
|
|
||||||
import java.util.Map;
|
|
||||||
import java.util.Objects;
|
|
||||||
import java.util.concurrent.ExecutorService;
|
|
||||||
import java.util.function.BiConsumer;
|
|
||||||
import java.util.function.Consumer;
|
|
||||||
import org.apache.lucene.util.SetOnce;
|
import org.apache.lucene.util.SetOnce;
|
||||||
import org.elasticsearch.ElasticsearchException;
|
import org.elasticsearch.ElasticsearchException;
|
||||||
import org.elasticsearch.ElasticsearchParseException;
|
import org.elasticsearch.ElasticsearchParseException;
|
||||||
@ -59,13 +49,22 @@ import org.hamcrest.CustomTypeSafeMatcher;
|
|||||||
import org.mockito.ArgumentMatcher;
|
import org.mockito.ArgumentMatcher;
|
||||||
import org.mockito.invocation.InvocationOnMock;
|
import org.mockito.invocation.InvocationOnMock;
|
||||||
|
|
||||||
|
import java.util.Arrays;
|
||||||
|
import java.util.Collections;
|
||||||
|
import java.util.Comparator;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
import java.util.Objects;
|
||||||
|
import java.util.concurrent.ExecutorService;
|
||||||
|
import java.util.function.BiConsumer;
|
||||||
|
import java.util.function.Consumer;
|
||||||
|
|
||||||
import static java.util.Collections.emptyMap;
|
import static java.util.Collections.emptyMap;
|
||||||
import static java.util.Collections.emptySet;
|
import static java.util.Collections.emptySet;
|
||||||
import static org.hamcrest.Matchers.equalTo;
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
import static org.hamcrest.Matchers.hasKey;
|
|
||||||
import static org.hamcrest.Matchers.instanceOf;
|
import static org.hamcrest.Matchers.instanceOf;
|
||||||
import static org.hamcrest.Matchers.is;
|
import static org.hamcrest.Matchers.is;
|
||||||
import static org.hamcrest.Matchers.not;
|
|
||||||
import static org.hamcrest.Matchers.notNullValue;
|
import static org.hamcrest.Matchers.notNullValue;
|
||||||
import static org.hamcrest.Matchers.nullValue;
|
import static org.hamcrest.Matchers.nullValue;
|
||||||
import static org.hamcrest.Matchers.sameInstance;
|
import static org.hamcrest.Matchers.sameInstance;
|
||||||
@@ -769,16 +768,14 @@ public class IngestServiceTests extends ESTestCase {
         previousClusterState = clusterState;
         clusterState = IngestService.innerPut(putRequest, clusterState);
         ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
-        final Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
-        configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"), XContentType.JSON));
-        configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"), XContentType.JSON));
-        ingestService.updatePipelineStats(new IngestMetadata(configurationMap));
 
         @SuppressWarnings("unchecked") final BiConsumer<IndexRequest, Exception> failureHandler = mock(BiConsumer.class);
         @SuppressWarnings("unchecked") final Consumer<Exception> completionHandler = mock(Consumer.class);
 
         final IndexRequest indexRequest = new IndexRequest("_index");
         indexRequest.setPipeline("_id1");
+        indexRequest.source(randomAlphaOfLength(10), randomAlphaOfLength(10));
         ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
         final IngestStats afterFirstRequestStats = ingestService.stats();
         assertThat(afterFirstRequestStats.getStatsPerPipeline().size(), equalTo(2));
@@ -793,23 +790,21 @@ public class IngestServiceTests extends ESTestCase {
         assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(1L));
         assertThat(afterSecondRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L));
         assertThat(afterSecondRequestStats.getTotalStats().getIngestCount(), equalTo(2L));
-    }
 
-    // issue: https://github.com/elastic/elasticsearch/issues/18126
-    public void testUpdatingStatsWhenRemovingPipelineWorks() {
-        IngestService ingestService = createWithProcessors();
-        Map<String, PipelineConfiguration> configurationMap = new HashMap<>();
-        configurationMap.put("_id1", new PipelineConfiguration("_id1", new BytesArray("{}"), XContentType.JSON));
-        configurationMap.put("_id2", new PipelineConfiguration("_id2", new BytesArray("{}"), XContentType.JSON));
-        ingestService.updatePipelineStats(new IngestMetadata(configurationMap));
-        assertThat(ingestService.stats().getStatsPerPipeline(), hasKey("_id1"));
-        assertThat(ingestService.stats().getStatsPerPipeline(), hasKey("_id2"));
-
-        configurationMap = new HashMap<>();
-        configurationMap.put("_id3", new PipelineConfiguration("_id3", new BytesArray("{}"), XContentType.JSON));
-        ingestService.updatePipelineStats(new IngestMetadata(configurationMap));
-        assertThat(ingestService.stats().getStatsPerPipeline(), not(hasKey("_id1")));
-        assertThat(ingestService.stats().getStatsPerPipeline(), not(hasKey("_id2")));
+        //update cluster state and ensure that new stats are added to old stats
+        putRequest = new PutPipelineRequest("_id1",
+            new BytesArray("{\"processors\": [{\"mock\" : {}}, {\"mock\" : {}}]}"), XContentType.JSON);
+        previousClusterState = clusterState;
+        clusterState = IngestService.innerPut(putRequest, clusterState);
+        ingestService.applyClusterState(new ClusterChangedEvent("", clusterState, previousClusterState));
+        indexRequest.setPipeline("_id1");
+        ingestService.executeBulkRequest(Collections.singletonList(indexRequest), failureHandler, completionHandler, indexReq -> {});
+        final IngestStats afterThirdRequestStats = ingestService.stats();
+        assertThat(afterThirdRequestStats.getStatsPerPipeline().size(), equalTo(2));
+        assertThat(afterThirdRequestStats.getStatsPerPipeline().get("_id1").getIngestCount(), equalTo(2L));
+        assertThat(afterThirdRequestStats.getStatsPerPipeline().get("_id2").getIngestCount(), equalTo(1L));
+        assertThat(afterThirdRequestStats.getTotalStats().getIngestCount(), equalTo(3L));
     }
 
     public void testExecuteWithDrop() {
@@ -18,20 +18,17 @@
  */
 package org.elasticsearch.ingest;
 
+import org.elasticsearch.ElasticsearchException;
+import org.elasticsearch.test.ESTestCase;
+
+import java.time.Clock;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.ingest.CompoundProcessor;
-import org.elasticsearch.ingest.IngestDocument;
-import org.elasticsearch.ingest.IngestService;
-import org.elasticsearch.ingest.Pipeline;
-import org.elasticsearch.ingest.PipelineProcessor;
-import org.elasticsearch.ingest.Processor;
-import org.elasticsearch.ingest.RandomDocumentPicks;
-import org.elasticsearch.test.ESTestCase;
 
+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
 
@@ -130,4 +127,81 @@ public class PipelineProcessorTests extends ESTestCase {
         outerProc.execute(testIngestDocument);
         outerProc.execute(testIngestDocument);
     }
+
+    public void testPipelineProcessorWithPipelineChain() throws Exception {
+        String pipeline1Id = "pipeline1";
+        String pipeline2Id = "pipeline2";
+        String pipeline3Id = "pipeline3";
+        IngestService ingestService = mock(IngestService.class);
+        PipelineProcessor.Factory factory = new PipelineProcessor.Factory(ingestService);
+
+        Map<String, Object> pipeline1ProcessorConfig = new HashMap<>();
+        pipeline1ProcessorConfig.put("pipeline", pipeline2Id);
+        PipelineProcessor pipeline1Processor = factory.create(Collections.emptyMap(), null, pipeline1ProcessorConfig);
+
+        Map<String, Object> pipeline2ProcessorConfig = new HashMap<>();
+        pipeline2ProcessorConfig.put("pipeline", pipeline3Id);
+        PipelineProcessor pipeline2Processor = factory.create(Collections.emptyMap(), null, pipeline2ProcessorConfig);
+
+        Clock clock = mock(Clock.class);
+        when(clock.millis()).thenReturn(0L).thenReturn(0L);
+        Pipeline pipeline1 = new Pipeline(
+            pipeline1Id, null, null, new CompoundProcessor(pipeline1Processor), clock
+        );
+
+        String key1 = randomAlphaOfLength(10);
+        clock = mock(Clock.class);
+        when(clock.millis()).thenReturn(0L).thenReturn(3L);
+        Pipeline pipeline2 = new Pipeline(
+            pipeline2Id, null, null, new CompoundProcessor(true,
+                Arrays.asList(
+                    new TestProcessor(ingestDocument -> {
+                        ingestDocument.setFieldValue(key1, randomInt());
+                    }),
+                    pipeline2Processor),
+                Collections.emptyList()),
+            clock
+        );
+        clock = mock(Clock.class);
+        when(clock.millis()).thenReturn(0L).thenReturn(2L);
+        Pipeline pipeline3 = new Pipeline(
+            pipeline3Id, null, null, new CompoundProcessor(
+                new TestProcessor(ingestDocument -> {
+                    throw new RuntimeException("error");
+                })), clock
+        );
+        when(ingestService.getPipeline(pipeline1Id)).thenReturn(pipeline1);
+        when(ingestService.getPipeline(pipeline2Id)).thenReturn(pipeline2);
+        when(ingestService.getPipeline(pipeline3Id)).thenReturn(pipeline3);
+
+        IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>());
+        //start the chain
+        ingestDocument.executePipeline(pipeline1);
+        assertNotNull(ingestDocument.getSourceAndMetadata().get(key1));
+
+        //check the stats
+        IngestStats.Stats pipeline1Stats = pipeline1.getMetrics().createStats();
+        IngestStats.Stats pipeline2Stats = pipeline2.getMetrics().createStats();
+        IngestStats.Stats pipeline3Stats = pipeline3.getMetrics().createStats();
+
+        //current
+        assertThat(pipeline1Stats.getIngestCurrent(), equalTo(0L));
+        assertThat(pipeline2Stats.getIngestCurrent(), equalTo(0L));
+        assertThat(pipeline3Stats.getIngestCurrent(), equalTo(0L));
+
+        //count
+        assertThat(pipeline1Stats.getIngestCount(), equalTo(1L));
+        assertThat(pipeline2Stats.getIngestCount(), equalTo(1L));
+        assertThat(pipeline3Stats.getIngestCount(), equalTo(1L));
+
+        //time
+        assertThat(pipeline1Stats.getIngestTimeInMillis(), equalTo(0L));
+        assertThat(pipeline2Stats.getIngestTimeInMillis(), equalTo(3L));
+        assertThat(pipeline3Stats.getIngestTimeInMillis(), equalTo(2L));
+
+        //failure
+        assertThat(pipeline1Stats.getIngestFailedCount(), equalTo(0L));
+        assertThat(pipeline2Stats.getIngestFailedCount(), equalTo(0L));
+        assertThat(pipeline3Stats.getIngestFailedCount(), equalTo(1L));
+    }
 }
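Note: the new testPipelineProcessorWithPipelineChain above makes its timing assertions deterministic by handing each Pipeline a mocked java.time.Clock whose consecutive millis() calls are stubbed. That trick, isolated as a minimal self-contained sketch (plain Mockito, no ingest classes involved):

    import java.time.Clock;

    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    class MockedClockSketch {
        static long elapsedMillis() {
            Clock clock = mock(Clock.class);
            // First call is the start timestamp, second call the end timestamp.
            when(clock.millis()).thenReturn(0L).thenReturn(3L);
            long start = clock.millis();
            return clock.millis() - start; // always 3, like pipeline2's asserted ingest time
        }
    }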
@@ -66,12 +66,12 @@ public class ScriptServiceTests extends ESTestCase {
             scripts.put(i + "+" + i, p -> null); // only care about compilation, not execution
         }
         scripts.put("script", p -> null);
-        scriptEngine = new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, scripts);
+        scriptEngine = new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, scripts, Collections.emptyMap());
         //prevent duplicates using map
         contexts = new HashMap<>(ScriptModule.CORE_CONTEXTS);
         engines = new HashMap<>();
         engines.put(scriptEngine.getType(), scriptEngine);
-        engines.put("test", new MockScriptEngine("test", scripts));
+        engines.put("test", new MockScriptEngine("test", scripts, Collections.emptyMap()));
         logger.info("--> setup script service");
     }
 
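Note: the ScriptServiceTests hunk above makes the same mechanical change as the ConditionalProcessorTests hunks earlier in this diff: MockScriptEngine now takes a third constructor argument, a map that these tests do not use and therefore pass as Collections.emptyMap(). A minimal sketch of the updated call shape, assuming only the constructor arity shown in the diff and the test-framework package locations; the scripts map mirrors the one built in ScriptServiceTests:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Function;

    import org.elasticsearch.script.MockScriptEngine;
    import org.elasticsearch.script.Script;

    class MockScriptEngineSetupSketch {
        static MockScriptEngine newEngine() {
            // Script values only need to compile, not execute, exactly as in the test.
            Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
            scripts.put("script", p -> null);
            // The third argument is the newly added map; tests pass it empty when unused.
            return new MockScriptEngine(Script.DEFAULT_SCRIPT_LANG, scripts, Collections.emptyMap());
        }
    }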
@@ -0,0 +1,162 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations;
+
+import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.search.SearchModule;
+import org.elasticsearch.search.aggregations.AggregatorFactories.Builder;
+import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumPipelineAggregationBuilder;
+import org.elasticsearch.test.AbstractSerializingTestCase;
+import org.junit.Before;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Set;
+
+import static java.util.Collections.emptyList;
+import static org.hamcrest.Matchers.equalTo;
+
+public class AggregatorFactoriesBuilderTests extends AbstractSerializingTestCase<AggregatorFactories.Builder> {
+
+    private NamedWriteableRegistry namedWriteableRegistry;
+    private NamedXContentRegistry namedXContentRegistry;
+
+    @Before
+    public void setUp() throws Exception {
+        super.setUp();
+
+        // register aggregations as NamedWriteable
+        SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList());
+        namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables());
+        namedXContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents());
+    }
+
+    @Override
+    protected NamedWriteableRegistry getNamedWriteableRegistry() {
+        return namedWriteableRegistry;
+    }
+
+    @Override
+    protected NamedXContentRegistry xContentRegistry() {
+        return namedXContentRegistry;
+    }
+
+    @Override
+    protected Builder doParseInstance(XContentParser parser) throws IOException {
+        // parseAggregators expects to be already inside the xcontent object
+        assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+        AggregatorFactories.Builder builder = AggregatorFactories.parseAggregators(parser);
+        return builder;
+    }
+
+    @Override
+    protected Builder createTestInstance() {
+        AggregatorFactories.Builder builder = new AggregatorFactories.Builder();
+
+        // ensure that the unlikely does not happen: 2 aggs share the same name
+        Set<String> names = new HashSet<>();
+        for (int i = 0; i < randomIntBetween(1, 20); ++i) {
+            AggregationBuilder aggBuilder = getRandomAggregation();
+            if (names.add(aggBuilder.getName())) {
+                builder.addAggregator(aggBuilder);
+            }
+        }
+
+        for (int i = 0; i < randomIntBetween(0, 20); ++i) {
+            PipelineAggregationBuilder aggBuilder = getRandomPipelineAggregation();
+            if (names.add(aggBuilder.getName())) {
+                builder.addPipelineAggregator(aggBuilder);
+            }
+        }
+
+        return builder;
+    }
+
+    @Override
+    protected Reader<Builder> instanceReader() {
+        return AggregatorFactories.Builder::new;
+    }
+
+    public void testUnorderedEqualsSubSet() {
+        Set<String> names = new HashSet<>();
+        List<AggregationBuilder> aggBuilders = new ArrayList<>();
+
+        while (names.size() < 2) {
+            AggregationBuilder aggBuilder = getRandomAggregation();
+
+            if (names.add(aggBuilder.getName())) {
+                aggBuilders.add(aggBuilder);
+            }
+        }
+
+        AggregatorFactories.Builder builder1 = new AggregatorFactories.Builder();
+        AggregatorFactories.Builder builder2 = new AggregatorFactories.Builder();
+
+        builder1.addAggregator(aggBuilders.get(0));
+        builder1.addAggregator(aggBuilders.get(1));
+        builder2.addAggregator(aggBuilders.get(1));
+
+        assertFalse(builder1.equals(builder2));
+        assertFalse(builder2.equals(builder1));
+        assertNotEquals(builder1.hashCode(), builder2.hashCode());
+
+        builder2.addAggregator(aggBuilders.get(0));
+        assertTrue(builder1.equals(builder2));
+        assertTrue(builder2.equals(builder1));
+        assertEquals(builder1.hashCode(), builder2.hashCode());
+
+        builder1.addPipelineAggregator(getRandomPipelineAggregation());
+        assertFalse(builder1.equals(builder2));
+        assertFalse(builder2.equals(builder1));
+        assertNotEquals(builder1.hashCode(), builder2.hashCode());
+    }
+
+    private static AggregationBuilder getRandomAggregation() {
+        // just a couple of aggregations, sufficient for the purpose of this test
+        final int randomAggregatorPoolSize = 4;
+        switch (randomIntBetween(1, randomAggregatorPoolSize)) {
+        case 1:
+            return AggregationBuilders.avg(randomAlphaOfLengthBetween(3, 10));
+        case 2:
+            return AggregationBuilders.min(randomAlphaOfLengthBetween(3, 10));
+        case 3:
+            return AggregationBuilders.max(randomAlphaOfLengthBetween(3, 10));
+        case 4:
+            return AggregationBuilders.sum(randomAlphaOfLengthBetween(3, 10));
+        }
+
+        // never reached
+        return null;
+    }
+
+    private static PipelineAggregationBuilder getRandomPipelineAggregation() {
+        // just 1 type of pipeline agg, sufficient for the purpose of this test
+        String name = randomAlphaOfLengthBetween(3, 20);
+        String bucketsPath = randomAlphaOfLengthBetween(3, 20);
+        PipelineAggregationBuilder builder = new CumulativeSumPipelineAggregationBuilder(name, bucketsPath);
+        return builder;
+    }
+}
@@ -41,7 +41,7 @@ import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptP
 import org.elasticsearch.test.AbstractQueryTestCase;
 import org.elasticsearch.test.ESTestCase;
 
-import java.util.List;
+import java.util.Collection;
 import java.util.Random;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -74,7 +74,7 @@ public class AggregatorFactoriesTests extends ESTestCase {
 
     public void testGetAggregatorFactories_returnsUnmodifiableList() {
         AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addAggregator(AggregationBuilders.avg("foo"));
-        List<AggregationBuilder> aggregatorFactories = builder.getAggregatorFactories();
+        Collection<AggregationBuilder> aggregatorFactories = builder.getAggregatorFactories();
         assertThat(aggregatorFactories.size(), equalTo(1));
         expectThrows(UnsupportedOperationException.class, () -> aggregatorFactories.add(AggregationBuilders.avg("bar")));
     }
@@ -82,7 +82,7 @@ public class AggregatorFactoriesTests extends ESTestCase {
     public void testGetPipelineAggregatorFactories_returnsUnmodifiableList() {
         AggregatorFactories.Builder builder = new AggregatorFactories.Builder().addPipelineAggregator(
                 PipelineAggregatorBuilders.avgBucket("foo", "path1"));
-        List<PipelineAggregationBuilder> pipelineAggregatorFactories = builder.getPipelineAggregatorFactories();
+        Collection<PipelineAggregationBuilder> pipelineAggregatorFactories = builder.getPipelineAggregatorFactories();
         assertThat(pipelineAggregatorFactories.size(), equalTo(1));
         expectThrows(UnsupportedOperationException.class,
             () -> pipelineAggregatorFactories.add(PipelineAggregatorBuilders.avgBucket("bar", "path2")));
@@ -269,10 +269,10 @@ public class AggregatorFactoriesTests extends ESTestCase {
         AggregatorFactories.Builder rewritten = builder
                 .rewrite(new QueryRewriteContext(xContentRegistry, null, null, () -> 0L));
         assertNotSame(builder, rewritten);
-        Collection<AggregationBuilder> aggregatorFactories = rewritten.getAggregatorFactories();
+        Collection<AggregationBuilder> aggregatorFactories = rewritten.getAggregatorFactories();
         assertEquals(1, aggregatorFactories.size());
-        assertThat(aggregatorFactories.get(0), instanceOf(FilterAggregationBuilder.class));
-        FilterAggregationBuilder rewrittenFilterAggBuilder = (FilterAggregationBuilder) aggregatorFactories.get(0);
+        assertThat(aggregatorFactories.iterator().next(), instanceOf(FilterAggregationBuilder.class));
+        FilterAggregationBuilder rewrittenFilterAggBuilder = (FilterAggregationBuilder) aggregatorFactories.iterator().next();
         assertNotSame(filterAggBuilder, rewrittenFilterAggBuilder);
         assertNotEquals(filterAggBuilder, rewrittenFilterAggBuilder);
         // Check the filter was rewritten from a wrapper query to a terms query
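Note: the AggregatorFactoriesTests hunks above follow from getAggregatorFactories() and getPipelineAggregatorFactories() now returning Collection rather than List, so positional access has to become iteration. A small stand-alone illustration of why .get(0) turns into .iterator().next() (the names here are illustrative only):

    import java.util.Arrays;
    import java.util.Collection;
    import java.util.List;

    class FirstElementSketch {
        public static void main(String[] args) {
            List<String> asList = Arrays.asList("avg", "min");
            Collection<String> asCollection = asList;

            String fromList = asList.get(0);                         // indexed access: List only
            String fromCollection = asCollection.iterator().next();  // works for any Collection

            System.out.println(fromList.equals(fromCollection));     // true
        }
    }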
@@ -121,7 +121,7 @@ public abstract class BasePipelineAggregationTestCase<AF extends AbstractPipelin
         AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser);
         assertThat(parsed.getAggregatorFactories(), hasSize(0));
         assertThat(parsed.getPipelineAggregatorFactories(), hasSize(1));
-        PipelineAggregationBuilder newAgg = parsed.getPipelineAggregatorFactories().get(0);
+        PipelineAggregationBuilder newAgg = parsed.getPipelineAggregatorFactories().iterator().next();
         assertNull(parser.nextToken());
         assertNotNull(newAgg);
         return newAgg;
@@ -36,7 +36,7 @@ public class DateRangeTests extends BaseAggregationTestCase<DateRangeAggregation
     @Override
     protected DateRangeAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
-        DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder("foo");
+        DateRangeAggregationBuilder factory = new DateRangeAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
         for (int i = 0; i < numRanges; i++) {
             String key = null;
             if (randomBoolean()) {
@@ -171,9 +171,9 @@ public class FiltersTests extends BaseAggregationTestCase<FiltersAggregationBuil
         assertNotEquals(original, rewritten);
         assertThat(rewritten, instanceOf(TermsAggregationBuilder.class));
         assertThat(rewritten.getSubAggregations().size(), equalTo(1));
-        AggregationBuilder subAgg = rewritten.getSubAggregations().get(0);
+        AggregationBuilder subAgg = rewritten.getSubAggregations().iterator().next();
         assertThat(subAgg, instanceOf(FiltersAggregationBuilder.class));
-        assertNotSame(original.getSubAggregations().get(0), subAgg);
+        assertNotSame(original.getSubAggregations().iterator().next(), subAgg);
         assertEquals("my-agg", subAgg.getName());
         assertSame(rewritten,
                 rewritten.rewrite(new QueryRewriteContext(xContentRegistry(), null, null, () -> 0L)));
@@ -41,7 +41,7 @@ public class GeoDistanceRangeTests extends BaseAggregationTestCase<GeoDistanceAg
     protected GeoDistanceAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
         GeoPoint origin = RandomShapeGenerator.randomPoint(random());
-        GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder("foo", origin);
+        GeoDistanceAggregationBuilder factory = new GeoDistanceAggregationBuilder(randomAlphaOfLengthBetween(3, 10), origin);
         for (int i = 0; i < numRanges; i++) {
             String key = null;
             if (randomBoolean()) {
@@ -33,7 +33,7 @@ public class HistogramTests extends BaseAggregationTestCase<HistogramAggregation
 
     @Override
     protected HistogramAggregationBuilder createTestAggregatorBuilder() {
-        HistogramAggregationBuilder factory = new HistogramAggregationBuilder("foo");
+        HistogramAggregationBuilder factory = new HistogramAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
         factory.field(INT_FIELD_NAME);
         factory.interval(randomDouble() * 1000);
         if (randomBoolean()) {
@@ -47,7 +47,7 @@ public class IpRangeTests extends BaseAggregationTestCase<IpRangeAggregationBuil
     @Override
     protected IpRangeAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
-        IpRangeAggregationBuilder factory = new IpRangeAggregationBuilder("foo");
+        IpRangeAggregationBuilder factory = new IpRangeAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
         for (int i = 0; i < numRanges; i++) {
             String key = null;
             if (randomBoolean()) {
@@ -26,7 +26,7 @@ public class MissingTests extends BaseAggregationTestCase<MissingAggregationBuil
 
     @Override
     protected final MissingAggregationBuilder createTestAggregatorBuilder() {
-        MissingAggregationBuilder factory = new MissingAggregationBuilder("foo", null);
+        MissingAggregationBuilder factory = new MissingAggregationBuilder(randomAlphaOfLengthBetween(3, 10), null);
         String field = randomNumericField();
         randomFieldOrScript(factory, field);
         return factory;
@@ -36,7 +36,7 @@ public class RangeTests extends BaseAggregationTestCase<RangeAggregationBuilder>
     @Override
     protected RangeAggregationBuilder createTestAggregatorBuilder() {
         int numRanges = randomIntBetween(1, 10);
-        RangeAggregationBuilder factory = new RangeAggregationBuilder("foo");
+        RangeAggregationBuilder factory = new RangeAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
         for (int i = 0; i < numRanges; i++) {
             String key = null;
             if (randomBoolean()) {
@@ -26,7 +26,7 @@ public class SamplerTests extends BaseAggregationTestCase<SamplerAggregationBuil
 
     @Override
     protected final SamplerAggregationBuilder createTestAggregatorBuilder() {
-        SamplerAggregationBuilder factory = new SamplerAggregationBuilder("foo");
+        SamplerAggregationBuilder factory = new SamplerAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
         if (randomBoolean()) {
             factory.shardSize(randomIntBetween(1, 1000));
         }
@@ -41,7 +41,7 @@ public class DateHistogramTests extends BaseAggregationTestCase<DateHistogramAgg
 
     @Override
     protected DateHistogramAggregationBuilder createTestAggregatorBuilder() {
-        DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder("foo");
+        DateHistogramAggregationBuilder factory = new DateHistogramAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
         factory.field(INT_FIELD_NAME);
         if (randomBoolean()) {
             factory.interval(randomIntBetween(1, 100000));
@@ -26,7 +26,7 @@ public class DiversifiedAggregationBuilderTests extends BaseAggregationTestCase<
 
     @Override
     protected final DiversifiedAggregationBuilder createTestAggregatorBuilder() {
-        DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder("foo");
+        DiversifiedAggregationBuilder factory = new DiversifiedAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
        String field = randomNumericField();
        randomFieldOrScript(factory, field);
        if (randomBoolean()) {
@@ -23,7 +23,7 @@ public class AvgTests extends AbstractNumericMetricTestCase<AvgAggregationBuilde
 
     @Override
     protected AvgAggregationBuilder doCreateTestAggregatorFactory() {
-        return new AvgAggregationBuilder("foo");
+        return new AvgAggregationBuilder(randomAlphaOfLengthBetween(3, 10));
     }
 
 }
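Note: the aggregation-builder hunks above all make one repeated change: the hard-coded "foo" name is replaced with randomAlphaOfLengthBetween(3, 10), an ESTestCase helper, presumably so the builder's name field gets randomized coverage instead of always serializing the same constant. For readers outside the test framework, a hypothetical re-implementation with the same shape (this is not the ESTestCase method, just a stand-in):

    import java.util.Random;

    class RandomNameSketch {
        private static final Random RANDOM = new Random();

        // Hypothetical equivalent of ESTestCase.randomAlphaOfLengthBetween(min, max):
        // a lowercase alphabetic string whose length is uniform in [min, max].
        static String randomAlphaOfLengthBetween(int min, int max) {
            int length = min + RANDOM.nextInt(max - min + 1);
            StringBuilder sb = new StringBuilder(length);
            for (int i = 0; i < length; i++) {
                sb.append((char) ('a' + RANDOM.nextInt(26)));
            }
            return sb.toString();
        }

        public static void main(String[] args) {
            System.out.println(randomAlphaOfLengthBetween(3, 10)); // e.g. "qhtzek"
        }
    }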
Some files were not shown because too many files have changed in this diff.