Merge branch 'master' into index-lifecycle
commit fb907706ec
@@ -42,11 +42,6 @@
   <suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]StoredScriptsDocumentationIT.java" id="SnippetLength" />
   <suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]TasksClientDocumentationIT.java" id="SnippetLength" />
   <suppress files="client[/\\]rest-high-level[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]WatcherDocumentationIT.java" id="SnippetLength" />
-  <!--
-    This one is in plugins/examples/script-expert-scoring but we need to
-    suppress it like this because we build that project twice, once in for
-    real and once as a test for our build system. -->
-  <suppress files="src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]example[/\\]expertscript[/\\]ExpertScriptPlugin.java" id="SnippetLength" />
   <suppress files="modules[/\\]reindex[/\\]src[/\\]test[/\\]java[/\\]org[/\\]elasticsearch[/\\]client[/\\]documentation[/\\]ReindexDocumentationIT.java" id="SnippetLength" />
 
   <!-- Hopefully temporary suppression of LineLength on files that don't pass it. We should remove these when we the
@@ -166,7 +161,6 @@
   <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]AcknowledgedRequestBuilder.java" checks="LineLength" />
   <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]MasterNodeOperationRequestBuilder.java" checks="LineLength" />
   <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]MasterNodeReadOperationRequestBuilder.java" checks="LineLength" />
-  <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]TransportMasterNodeAction.java" checks="LineLength" />
   <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]ClusterInfoRequest.java" checks="LineLength" />
   <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]ClusterInfoRequestBuilder.java" checks="LineLength" />
   <suppress files="server[/\\]src[/\\]main[/\\]java[/\\]org[/\\]elasticsearch[/\\]action[/\\]support[/\\]master[/\\]info[/\\]TransportClusterInfoAction.java" checks="LineLength" />
@@ -492,9 +492,9 @@ public class RestHighLevelClient implements Closeable {
      * @param listener the listener to be notified upon request completion
      */
     public final void updateByQueryAsync(UpdateByQueryRequest updateByQueryRequest, RequestOptions options,
                                          ActionListener<BulkByScrollResponse> listener) {
         performRequestAsyncAndParseEntity(
             updateByQueryRequest, RequestConverters::updateByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet()
         );
     }
 
@@ -524,36 +524,38 @@ public class RestHighLevelClient implements Closeable {
     public final void deleteByQueryAsync(DeleteByQueryRequest deleteByQueryRequest, RequestOptions options,
                                          ActionListener<BulkByScrollResponse> listener) {
         performRequestAsyncAndParseEntity(
             deleteByQueryRequest, RequestConverters::deleteByQuery, options, BulkByScrollResponse::fromXContent, listener, emptySet()
         );
     }
 
     /**
      * Executes a reindex rethrottling request.
      * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html#docs-reindex-rethrottle">
      * Reindex rethrottling API on elastic.co</a>
+     *
      * @param rethrottleRequest the request
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @return the response
      * @throws IOException in case there is a problem sending the request or parsing back the response
      */
     public final ListTasksResponse reindexRethrottle(RethrottleRequest rethrottleRequest, RequestOptions options) throws IOException {
         return performRequestAndParseEntity(rethrottleRequest, RequestConverters::rethrottle, options, ListTasksResponse::fromXContent,
             emptySet());
     }
 
     /**
      * Executes a reindex rethrottling request.
      * See the <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-reindex.html#docs-reindex-rethrottle">
      * Reindex rethrottling API on elastic.co</a>
+     *
      * @param rethrottleRequest the request
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener the listener to be notified upon request completion
      */
     public final void reindexRethrottleAsync(RethrottleRequest rethrottleRequest, RequestOptions options,
                                              ActionListener<ListTasksResponse> listener) {
         performRequestAsyncAndParseEntity(rethrottleRequest, RequestConverters::rethrottle, options, ListTasksResponse::fromXContent,
             listener, emptySet());
     }
 
     /**
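Note (not part of the diff): a minimal usage sketch of the rethrottle API documented above. It assumes an already-configured RestHighLevelClient named `client` and a `taskId` previously returned when a reindex task was submitted; both names are placeholders.

    // Throttle a running reindex task down to 100 requests per second.
    RethrottleRequest rethrottle = new RethrottleRequest(taskId, 100.0f);
    ListTasksResponse resp = client.reindexRethrottle(rethrottle, RequestOptions.DEFAULT);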
@@ -20,6 +20,8 @@
 package org.elasticsearch.client;
 
 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.rollup.GetRollupJobRequest;
+import org.elasticsearch.client.rollup.GetRollupJobResponse;
 import org.elasticsearch.client.rollup.PutRollupJobRequest;
 import org.elasticsearch.client.rollup.PutRollupJobResponse;
 
@@ -73,4 +75,37 @@ public class RollupClient {
             PutRollupJobResponse::fromXContent,
             listener, Collections.emptySet());
     }
+
+    /**
+     * Get a rollup job from the cluster.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
+     * the docs</a> for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public GetRollupJobResponse getRollupJob(GetRollupJobRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request,
+            RollupRequestConverters::getJob,
+            options,
+            GetRollupJobResponse::fromXContent,
+            Collections.emptySet());
+    }
+
+    /**
+     * Asynchronously get a rollup job from the cluster.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/rollup-put-job.html">
+     * the docs</a> for more.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void getRollupJobAsync(GetRollupJobRequest request, RequestOptions options, ActionListener<GetRollupJobResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request,
+            RollupRequestConverters::getJob,
+            options,
+            GetRollupJobResponse::fromXContent,
+            listener, Collections.emptySet());
+    }
 }
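Note (not part of the diff): a short usage sketch for the new get rollup job API. It assumes a configured RestHighLevelClient named `client`; the job id "my_job" is a placeholder. The rollup client is reached through `client.rollup()`, as the RollupIT changes later in this diff show.

    // Fetch one job by id; new GetRollupJobRequest() with no argument asks for all jobs instead.
    GetRollupJobRequest getJob = new GetRollupJobRequest("my_job");
    GetRollupJobResponse jobs = client.rollup().getRollupJob(getJob, RequestOptions.DEFAULT);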
@@ -18,7 +18,9 @@
  */
 package org.elasticsearch.client;
 
+import org.apache.http.client.methods.HttpGet;
 import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.rollup.GetRollupJobRequest;
 import org.elasticsearch.client.rollup.PutRollupJobRequest;
 
 import java.io.IOException;
@@ -42,4 +44,14 @@ final class RollupRequestConverters {
         request.setEntity(createEntity(putRollupJobRequest, REQUEST_BODY_CONTENT_TYPE));
         return request;
     }
+
+    static Request getJob(final GetRollupJobRequest getRollupJobRequest) {
+        String endpoint = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("rollup")
+            .addPathPartAsIs("job")
+            .addPathPart(getRollupJobRequest.getJobId())
+            .build();
+        return new Request(HttpGet.METHOD_NAME, endpoint);
+    }
 }
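Note (not part of the diff): for a request carrying the placeholder job id "my_job", the converter above produces

    GET _xpack/rollup/job/my_job

with no request body and no parameters.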
@@ -25,6 +25,7 @@ import org.elasticsearch.client.security.EnableUserRequest;
 import org.elasticsearch.client.security.PutUserRequest;
 import org.elasticsearch.client.security.PutUserResponse;
 import org.elasticsearch.client.security.EmptyResponse;
+import org.elasticsearch.client.security.ChangePasswordRequest;
 
 import java.io.IOException;
 
@@ -47,6 +48,7 @@ public final class SecurityClient {
      * Create/update a user in the native realm synchronously.
      * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html">
      * the docs</a> for more.
+     *
      * @param request the request with the user's information
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @return the response from the put user call
@@ -61,8 +63,9 @@ public final class SecurityClient {
      * Asynchronously create/update a user in the native realm.
      * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-users.html">
      * the docs</a> for more.
-     * @param request the request with the user's information
-     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     *
+     * @param request the request with the user's information
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener the listener to be notified upon request completion
      */
     public void putUserAsync(PutUserRequest request, RequestOptions options, ActionListener<PutUserResponse> listener) {
@@ -74,6 +77,7 @@ public final class SecurityClient {
      * Enable a native realm or built-in user synchronously.
      * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
      * the docs</a> for more.
+     *
      * @param request the request with the user to enable
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @return the response from the enable user call
@@ -88,12 +92,13 @@ public final class SecurityClient {
      * Enable a native realm or built-in user asynchronously.
      * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-enable-user.html">
      * the docs</a> for more.
-     * @param request the request with the user to enable
-     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     *
+     * @param request the request with the user to enable
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener the listener to be notified upon request completion
      */
     public void enableUserAsync(EnableUserRequest request, RequestOptions options,
                                 ActionListener<EmptyResponse> listener) {
         restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::enableUser, options,
             EmptyResponse::fromXContent, listener, emptySet());
     }
@@ -102,6 +107,7 @@ public final class SecurityClient {
      * Disable a native realm or built-in user synchronously.
      * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html">
      * the docs</a> for more.
+     *
      * @param request the request with the user to disable
      * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @return the response from the enable user call
@@ -116,13 +122,44 @@ public final class SecurityClient {
      * Disable a native realm or built-in user asynchronously.
      * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-disable-user.html">
      * the docs</a> for more.
-     * @param request the request with the user to disable
-     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     *
+     * @param request the request with the user to disable
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
      * @param listener the listener to be notified upon request completion
      */
     public void disableUserAsync(DisableUserRequest request, RequestOptions options,
                                  ActionListener<EmptyResponse> listener) {
         restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::disableUser, options,
             EmptyResponse::fromXContent, listener, emptySet());
     }
+
+    /**
+     * Change the password of a user of a native realm or built-in user synchronously.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-change-password.html">
+     * the docs</a> for more.
+     *
+     * @param request the request with the user's new password
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response from the change user password call
+     * @throws IOException in case there is a problem sending the request or parsing back the response
+     */
+    public EmptyResponse changePassword(ChangePasswordRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, SecurityRequestConverters::changePassword, options,
+            EmptyResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Change the password of a user of a native realm or built-in user asynchronously.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/security-api-change-password.html">
+     * the docs</a> for more.
+     *
+     * @param request the request with the user's new password
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon request completion
+     */
+    public void changePasswordAsync(ChangePasswordRequest request, RequestOptions options,
+                                    ActionListener<EmptyResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, SecurityRequestConverters::changePassword, options,
+            EmptyResponse::fromXContent, listener, emptySet());
+    }
 }
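Note (not part of the diff): a minimal usage sketch for the new change password API; `client` and the username "jacknich" are placeholders.

    char[] newPassword = new char[] {'n', 'e', 'w', '-', 'p', 'a', 's', 's'};
    ChangePasswordRequest request = new ChangePasswordRequest("jacknich", newPassword, RefreshPolicy.NONE);
    EmptyResponse response = client.security().changePassword(request, RequestOptions.DEFAULT);
    // The request does not clear the password array, so the caller zeroes it afterwards.
    Arrays.fill(newPassword, '\0');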
@@ -19,9 +19,11 @@
 
 package org.elasticsearch.client;
 
+import org.apache.http.client.methods.HttpPost;
 import org.apache.http.client.methods.HttpPut;
 import org.elasticsearch.client.security.DisableUserRequest;
 import org.elasticsearch.client.security.EnableUserRequest;
+import org.elasticsearch.client.security.ChangePasswordRequest;
 import org.elasticsearch.client.security.PutUserRequest;
 import org.elasticsearch.client.security.SetUserEnabledRequest;
 
@@ -34,6 +36,19 @@ final class SecurityRequestConverters {
 
     private SecurityRequestConverters() {}
 
+    static Request changePassword(ChangePasswordRequest changePasswordRequest) throws IOException {
+        String endpoint = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_xpack/security/user")
+            .addPathPart(changePasswordRequest.getUsername())
+            .addPathPartAsIs("_password")
+            .build();
+        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
+        request.setEntity(createEntity(changePasswordRequest, REQUEST_BODY_CONTENT_TYPE));
+        RequestConverters.Params params = new RequestConverters.Params(request);
+        params.withRefreshPolicy(changePasswordRequest.getRefreshPolicy());
+        return request;
+    }
+
     static Request putUser(PutUserRequest putUserRequest) throws IOException {
         String endpoint = new RequestConverters.EndpointBuilder()
             .addPathPartAsIs("_xpack/security/user")
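Note (not part of the diff): for the placeholder username "jacknich", the converter above produces

    POST _xpack/security/user/jacknich/_password

with the JSON body built by ChangePasswordRequest.toXContent and an optional refresh parameter taken from the request's refresh policy.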
@@ -0,0 +1,77 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.rollup;
+
+import org.elasticsearch.client.Validatable;
+import org.elasticsearch.client.ValidationException;
+
+import java.util.Objects;
+import java.util.Optional;
+
+/**
+ * Request to fetch rollup jobs.
+ */
+public class GetRollupJobRequest implements Validatable {
+    private final String jobId;
+
+    /**
+     * Create a request.
+     * @param jobId id of the job to return or {@code _all} to return all jobs
+     */
+    public GetRollupJobRequest(final String jobId) {
+        Objects.requireNonNull(jobId, "jobId is required");
+        if ("_all".equals(jobId)) {
+            throw new IllegalArgumentException("use the default ctor to ask for all jobs");
+        }
+        this.jobId = jobId;
+    }
+
+    /**
+     * Create a request to load all rollup jobs.
+     */
+    public GetRollupJobRequest() {
+        this.jobId = "_all";
+    }
+
+    /**
+     * ID of the job to return.
+     */
+    public String getJobId() {
+        return jobId;
+    }
+
+    @Override
+    public Optional<ValidationException> validate() {
+        return Optional.empty();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        final GetRollupJobRequest that = (GetRollupJobRequest) o;
+        return jobId.equals(that.jobId);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobId);
+    }
+}
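Note (not part of the diff): a small sketch of the constructor contract above; the job id is a placeholder.

    GetRollupJobRequest all = new GetRollupJobRequest();          // asks for every job via the _all sentinel
    GetRollupJobRequest one = new GetRollupJobRequest("my_job");  // asks for a single job by id
    // new GetRollupJobRequest("_all") throws IllegalArgumentException: use the default ctor instead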
@@ -0,0 +1,374 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.rollup;
+
+import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Objects;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
+import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
+import static java.util.Collections.unmodifiableList;
+import static java.util.stream.Collectors.joining;
+
+/**
+ * Response from rollup's get jobs api.
+ */
+public class GetRollupJobResponse {
+    static final ParseField JOBS = new ParseField("jobs");
+    static final ParseField CONFIG = new ParseField("config");
+    static final ParseField STATS = new ParseField("stats");
+    static final ParseField STATUS = new ParseField("status");
+    static final ParseField NUM_PAGES = new ParseField("pages_processed");
+    static final ParseField NUM_INPUT_DOCUMENTS = new ParseField("documents_processed");
+    static final ParseField NUM_OUTPUT_DOCUMENTS = new ParseField("rollups_indexed");
+    static final ParseField NUM_INVOCATIONS = new ParseField("trigger_count");
+    static final ParseField STATE = new ParseField("job_state");
+    static final ParseField CURRENT_POSITION = new ParseField("current_position");
+    static final ParseField UPGRADED_DOC_ID = new ParseField("upgraded_doc_id");
+
+    private List<JobWrapper> jobs;
+
+    GetRollupJobResponse(final List<JobWrapper> jobs) {
+        this.jobs = Objects.requireNonNull(jobs, "jobs is required");
+    }
+
+    /**
+     * Jobs returned by the request.
+     */
+    public List<JobWrapper> getJobs() {
+        return jobs;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
+        final GetRollupJobResponse that = (GetRollupJobResponse) o;
+        return jobs.equals(that.jobs);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(jobs);
+    }
+
+    private static final ConstructingObjectParser<GetRollupJobResponse, Void> PARSER = new ConstructingObjectParser<>(
+            "get_rollup_job_response",
+            true,
+            args -> {
+                @SuppressWarnings("unchecked") // We're careful about the type in the list
+                List<JobWrapper> jobs = (List<JobWrapper>) args[0];
+                return new GetRollupJobResponse(unmodifiableList(jobs));
+            });
+    static {
+        PARSER.declareObjectArray(constructorArg(), JobWrapper.PARSER::apply, JOBS);
+    }
+
+    public static GetRollupJobResponse fromXContent(final XContentParser parser) throws IOException {
+        return PARSER.parse(parser, null);
+    }
+
+    @Override
+    public final String toString() {
+        return "{jobs=" + jobs.stream().map(Object::toString).collect(joining("\n")) + "\n}";
+    }
+
+    public static class JobWrapper {
+        private final RollupJobConfig job;
+        private final RollupIndexerJobStats stats;
+        private final RollupJobStatus status;
+
+        JobWrapper(RollupJobConfig job, RollupIndexerJobStats stats, RollupJobStatus status) {
+            this.job = job;
+            this.stats = stats;
+            this.status = status;
+        }
+
+        /**
+         * Configuration of the job.
+         */
+        public RollupJobConfig getJob() {
+            return job;
+        }
+
+        /**
+         * Statistics about the execution of the job.
+         */
+        public RollupIndexerJobStats getStats() {
+            return stats;
+        }
+
+        /**
+         * Current state of the job.
+         */
+        public RollupJobStatus getStatus() {
+            return status;
+        }
+
+        private static final ConstructingObjectParser<JobWrapper, Void> PARSER = new ConstructingObjectParser<>(
+                "job",
+                true,
+                a -> new JobWrapper((RollupJobConfig) a[0], (RollupIndexerJobStats) a[1], (RollupJobStatus) a[2]));
+        static {
+            PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> RollupJobConfig.fromXContent(p, null), CONFIG);
+            PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupIndexerJobStats.PARSER::apply, STATS);
+            PARSER.declareObject(ConstructingObjectParser.constructorArg(), RollupJobStatus.PARSER::apply, STATUS);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null) {
+                return false;
+            }
+            if (getClass() != obj.getClass()) {
+                return false;
+            }
+            JobWrapper other = (JobWrapper) obj;
+            return Objects.equals(job, other.job)
+                    && Objects.equals(stats, other.stats)
+                    && Objects.equals(status, other.status);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(job, stats, status);
+        }
+
+        @Override
+        public final String toString() {
+            return "{job=" + job
+                    + ", stats=" + stats
+                    + ", status=" + status + "}";
+        }
+    }
+
+    /**
+     * The Rollup specialization of stats for the AsyncTwoPhaseIndexer.
+     * Note: instead of `documents_indexed`, this XContent show `rollups_indexed`
+     */
+    public static class RollupIndexerJobStats {
+        private final long numPages;
+        private final long numInputDocuments;
+        private final long numOuputDocuments;
+        private final long numInvocations;
+
+        RollupIndexerJobStats(long numPages, long numInputDocuments, long numOuputDocuments, long numInvocations) {
+            this.numPages = numPages;
+            this.numInputDocuments = numInputDocuments;
+            this.numOuputDocuments = numOuputDocuments;
+            this.numInvocations = numInvocations;
+        }
+
+        /**
+         * The number of pages read from the input indices.
+         */
+        public long getNumPages() {
+            return numPages;
+        }
+
+        /**
+         * The number of documents read from the input indices.
+         */
+        public long getNumDocuments() {
+            return numInputDocuments;
+        }
+
+        /**
+         * Number of times that the job woke up to write documents.
+         */
+        public long getNumInvocations() {
+            return numInvocations;
+        }
+
+        /**
+         * Number of documents written to the result indices.
+         */
+        public long getOutputDocuments() {
+            return numOuputDocuments;
+        }
+
+        private static final ConstructingObjectParser<RollupIndexerJobStats, Void> PARSER = new ConstructingObjectParser<>(
+                STATS.getPreferredName(),
+                true,
+                args -> new RollupIndexerJobStats((long) args[0], (long) args[1], (long) args[2], (long) args[3]));
+        static {
+            PARSER.declareLong(constructorArg(), NUM_PAGES);
+            PARSER.declareLong(constructorArg(), NUM_INPUT_DOCUMENTS);
+            PARSER.declareLong(constructorArg(), NUM_OUTPUT_DOCUMENTS);
+            PARSER.declareLong(constructorArg(), NUM_INVOCATIONS);
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            if (this == other) return true;
+            if (other == null || getClass() != other.getClass()) return false;
+            RollupIndexerJobStats that = (RollupIndexerJobStats) other;
+            return Objects.equals(this.numPages, that.numPages)
+                    && Objects.equals(this.numInputDocuments, that.numInputDocuments)
+                    && Objects.equals(this.numOuputDocuments, that.numOuputDocuments)
+                    && Objects.equals(this.numInvocations, that.numInvocations);
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(numPages, numInputDocuments, numOuputDocuments, numInvocations);
+        }
+
+        @Override
+        public final String toString() {
+            return "{pages=" + numPages
+                    + ", input_docs=" + numInputDocuments
+                    + ", output_docs=" + numOuputDocuments
+                    + ", invocations=" + numInvocations + "}";
+        }
+    }
+
+    /**
+     * Status of the rollup job.
+     */
+    public static class RollupJobStatus {
+        private final IndexerState state;
+        private final Map<String, Object> currentPosition;
+        private final boolean upgradedDocumentId;
+
+        RollupJobStatus(IndexerState state, Map<String, Object> position, boolean upgradedDocumentId) {
+            this.state = state;
+            this.currentPosition = position;
+            this.upgradedDocumentId = upgradedDocumentId;
+        }
+
+        /**
+         * The state of the writer.
+         */
+        public IndexerState getState() {
+            return state;
+        }
+        /**
+         * The current position of the writer.
+         */
+        public Map<String, Object> getCurrentPosition() {
+            return currentPosition;
+        }
+        /**
+         * Flag holds the state of the ID scheme, e.g. if it has been upgraded
+         * to the concatenation scheme.
+         */
+        public boolean getUpgradedDocumentId() {
+            return upgradedDocumentId;
+        }
+
+        private static final ConstructingObjectParser<RollupJobStatus, Void> PARSER = new ConstructingObjectParser<>(
+                STATUS.getPreferredName(),
+                true,
+                args -> {
+                    IndexerState state = (IndexerState) args[0];
+                    @SuppressWarnings("unchecked") // We're careful of the contents
+                    Map<String, Object> currentPosition = (Map<String, Object>) args[1];
+                    Boolean upgradedDocumentId = (Boolean) args[2];
+                    return new RollupJobStatus(state, currentPosition, upgradedDocumentId == null ? false : upgradedDocumentId);
+                });
+        static {
+            PARSER.declareField(constructorArg(), p -> IndexerState.fromString(p.text()), STATE, ObjectParser.ValueType.STRING);
+            PARSER.declareField(optionalConstructorArg(), p -> {
+                if (p.currentToken() == XContentParser.Token.START_OBJECT) {
+                    return p.map();
+                }
+                if (p.currentToken() == XContentParser.Token.VALUE_NULL) {
+                    return null;
+                }
+                throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]");
+            }, CURRENT_POSITION, ObjectParser.ValueType.VALUE_OBJECT_ARRAY);
+
+            // Optional to accommodate old versions of state
+            PARSER.declareBoolean(ConstructingObjectParser.optionalConstructorArg(), UPGRADED_DOC_ID);
+        }
+
+        @Override
+        public boolean equals(Object other) {
+            if (this == other) return true;
+            if (other == null || getClass() != other.getClass()) return false;
+            RollupJobStatus that = (RollupJobStatus) other;
+            return Objects.equals(state, that.state)
+                    && Objects.equals(currentPosition, that.currentPosition)
+                    && upgradedDocumentId == that.upgradedDocumentId;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(state, currentPosition, upgradedDocumentId);
+        }
+
+        @Override
+        public final String toString() {
+            return "{stats=" + state
+                    + ", currentPosition=" + currentPosition
+                    + ", upgradedDocumentId=" + upgradedDocumentId + "}";
+        }
+    }
+
+    /**
+     * IndexerState represents the internal state of the indexer. It
+     * is also persistent when changing from started/stopped in case the allocated
+     * task is restarted elsewhere.
+     */
+    public enum IndexerState {
+        /** Indexer is running, but not actively indexing data (e.g. it's idle). */
+        STARTED,
+
+        /** Indexer is actively indexing data. */
+        INDEXING,
+
+        /**
+         * Transition state to where an indexer has acknowledged the stop
+         * but is still in process of halting.
+         */
+        STOPPING,
+
+        /** Indexer is "paused" and ignoring scheduled triggers. */
+        STOPPED,
+
+        /**
+         * Something (internal or external) has requested the indexer abort
+         * and shutdown.
+         */
+        ABORTING;
+
+        static IndexerState fromString(String name) {
+            return valueOf(name.trim().toUpperCase(Locale.ROOT));
+        }
+
+        String value() {
+            return name().toLowerCase(Locale.ROOT);
+        }
+    }
+}
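Note (not part of the diff): a sketch of reading the public accessors defined above, assuming a GetRollupJobResponse named `response` obtained from the client.

    for (GetRollupJobResponse.JobWrapper wrapper : response.getJobs()) {
        RollupJobConfig config = wrapper.getJob();                 // the job's configuration
        long docsRead = wrapper.getStats().getNumDocuments();      // documents read from the input indices
        GetRollupJobResponse.IndexerState state = wrapper.getStatus().getState();
    }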
@@ -0,0 +1,76 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.security;
+
+import org.elasticsearch.client.Validatable;
+import org.elasticsearch.common.CharArrays;
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Objects;
+
+/**
+ * Request object to change the password of a user of a native realm or a built-in user.
+ */
+public final class ChangePasswordRequest implements Validatable, ToXContentObject {
+
+    private final String username;
+    private final char[] password;
+    private final RefreshPolicy refreshPolicy;
+
+    /**
+     * @param username The username of the user whose password should be changed or null for the current user.
+     * @param password The new password. The password array is not cleared by the {@link ChangePasswordRequest} object so the
+     *                 calling code must clear it after receiving the response.
+     * @param refreshPolicy The refresh policy for the request.
+     */
+    public ChangePasswordRequest(@Nullable String username, char[] password, RefreshPolicy refreshPolicy) {
+        this.username = username;
+        this.password = Objects.requireNonNull(password, "password is required");
+        this.refreshPolicy = refreshPolicy == null ? RefreshPolicy.getDefault() : refreshPolicy;
+    }
+
+    public String getUsername() {
+        return username;
+    }
+
+    public char[] getPassword() {
+        return password;
+    }
+
+    public RefreshPolicy getRefreshPolicy() {
+        return refreshPolicy;
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        byte[] charBytes = CharArrays.toUtf8Bytes(password);
+        try {
+            return builder.startObject()
+                .field("password").utf8Value(charBytes, 0, charBytes.length)
+                .endObject();
+        } finally {
+            Arrays.fill(charBytes, (byte) 0);
+        }
+    }
+}
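Note (not part of the diff): per the constructor above, both the username and the refresh policy are optional. A sketch, with `password` a placeholder char[]:

    // null username targets the current user; null refresh policy falls back to RefreshPolicy.getDefault()
    ChangePasswordRequest self = new ChangePasswordRequest(null, password, null);

toXContent zeroes its temporary UTF-8 copy of the password in the finally block, but the original char[] stays intact until the caller clears it.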
@@ -25,7 +25,6 @@ import org.elasticsearch.common.CharArrays;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 
-import java.io.Closeable;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collections;
@@ -37,7 +36,7 @@ import java.util.Optional;
 /**
  * Request object to create or update a user in the native realm.
  */
-public final class PutUserRequest implements Validatable, Closeable, ToXContentObject {
+public final class PutUserRequest implements Validatable, ToXContentObject {
 
     private final String username;
     private final List<String> roles;
@@ -48,6 +47,20 @@ public final class PutUserRequest implements Validatable, Closeable, ToXContentObject {
     private final boolean enabled;
     private final RefreshPolicy refreshPolicy;
 
+    /**
+     * Creates a new request that is used to create or update a user in the native realm.
+     *
+     * @param username the username of the user to be created or updated
+     * @param password the password of the user. The password array is not modified by this class.
+     *                 It is the responsibility of the caller to clear the password after receiving
+     *                 a response.
+     * @param roles the roles that this user is assigned
+     * @param fullName the full name of the user that may be used for display purposes
+     * @param email the email address of the user
+     * @param enabled true if the user is enabled and allowed to access elasticsearch
+     * @param metadata a map of additional user attributes that may be used in templating roles
+     * @param refreshPolicy the refresh policy for the request.
+     */
     public PutUserRequest(String username, char[] password, List<String> roles, String fullName, String email, boolean enabled,
                           Map<String, Object> metadata, RefreshPolicy refreshPolicy) {
         this.username = Objects.requireNonNull(username, "username is required");
@@ -114,13 +127,6 @@ public final class PutUserRequest implements Validatable, Closeable, ToXContentObject {
         return result;
     }
 
-    @Override
-    public void close() {
-        if (password != null) {
-            Arrays.fill(password, (char) 0);
-        }
-    }
-
     @Override
     public Optional<ValidationException> validate() {
         if (metadata != null && metadata.keySet().stream().anyMatch(s -> s.startsWith("_"))) {
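Note (not part of the diff): with Closeable dropped, callers clear the password themselves, as the new constructor javadoc states. A sketch; `client`, the username, and the role are placeholders.

    char[] password = new char[] {'t', 'e', 's', 't', '-', 'p', 'a', 's', 's'};
    PutUserRequest putUser = new PutUserRequest("example_user", password, Collections.singletonList("superuser"),
        null, null, true, null, RefreshPolicy.NONE);
    PutUserResponse resp = client.security().putUser(putUser, RequestOptions.DEFAULT);
    Arrays.fill(password, '\0'); // no longer done by PutUserRequest.close()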
@@ -137,7 +143,11 @@ public final class PutUserRequest implements Validatable, Closeable, ToXContentObject {
         builder.field("username", username);
         if (password != null) {
             byte[] charBytes = CharArrays.toUtf8Bytes(password);
-            builder.field("password").utf8Value(charBytes, 0, charBytes.length);
+            try {
+                builder.field("password").utf8Value(charBytes, 0, charBytes.length);
+            } finally {
+                Arrays.fill(charBytes, (byte) 0);
+            }
         }
         if (roles != null) {
             builder.field("roles", roles);
@@ -90,7 +90,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
             if (id.equals("berlin") || id.equals("amsterdam5")) {
                 assertFalse(hit.getRating().isPresent());
             } else {
-                assertEquals(1, hit.getRating().get().intValue());
+                assertEquals(1, hit.getRating().getAsInt());
             }
         }
         EvalQueryQuality berlinQueryQuality = partialResults.get("berlin_query");
@@ -100,7 +100,7 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase {
         for (RatedSearchHit hit : hitsAndRatings) {
             String id = hit.getSearchHit().getId();
             if (id.equals("berlin")) {
-                assertEquals(1, hit.getRating().get().intValue());
+                assertEquals(1, hit.getRating().getAsInt());
             } else {
                 assertFalse(hit.getRating().isPresent());
             }
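Note (not part of the diff): the switch from `get().intValue()` to `getAsInt()` suggests RatedSearchHit.getRating() now returns an OptionalInt rather than an Optional<Integer>; `isPresent()` exists on both, so the absent-rating assertions are unchanged.

    OptionalInt rating = hit.getRating(); // hedged: inferred from the getAsInt()/isPresent() usage above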
@@ -27,6 +27,10 @@ import org.elasticsearch.action.index.IndexRequest;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.support.WriteRequest;
+import org.elasticsearch.client.rollup.GetRollupJobRequest;
+import org.elasticsearch.client.rollup.GetRollupJobResponse;
+import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState;
+import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
 import org.elasticsearch.client.rollup.PutRollupJobRequest;
 import org.elasticsearch.client.rollup.PutRollupJobResponse;
 import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
@@ -50,6 +54,13 @@ import java.util.Locale;
 import java.util.Map;
 
 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.either;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.hasKey;
+import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.lessThan;
 
 public class RollupIT extends ESRestHighLevelClientTestCase {
 
@@ -57,7 +68,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
         SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);
 
     @SuppressWarnings("unchecked")
-    public void testPutRollupJob() throws Exception {
+    public void testPutAndGetRollupJob() throws Exception {
         double sum = 0.0d;
         int max = Integer.MIN_VALUE;
         int min = Integer.MAX_VALUE;
@@ -90,7 +101,7 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
 
         BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT);
         assertEquals(RestStatus.OK, bulkResponse.status());
         if (bulkResponse.hasFailures()) {
             for (BulkItemResponse itemResponse : bulkResponse.getItems()) {
                 if (itemResponse.isFailed()) {
                     logger.fatal(itemResponse.getFailureMessage());
@ -158,5 +169,26 @@ public class RollupIT extends ESRestHighLevelClientTestCase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// TODO when we move cleaning rollup into ESTestCase we can randomly choose the _all version of this request
|
||||||
|
GetRollupJobRequest getRollupJobRequest = new GetRollupJobRequest(id);
|
||||||
|
GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
|
||||||
|
assertThat(getResponse.getJobs(), hasSize(1));
|
||||||
|
JobWrapper job = getResponse.getJobs().get(0);
|
||||||
|
assertEquals(putRollupJobRequest.getConfig(), job.getJob());
|
||||||
|
assertThat(job.getStats().getNumPages(), lessThan(10L));
|
||||||
|
assertEquals(numDocs, job.getStats().getNumDocuments());
|
||||||
|
assertThat(job.getStats().getNumInvocations(), greaterThan(0L));
|
||||||
|
assertEquals(1, job.getStats().getOutputDocuments());
|
||||||
|
assertThat(job.getStatus().getState(), either(equalTo(IndexerState.STARTED)).or(equalTo(IndexerState.INDEXING)));
|
||||||
|
assertThat(job.getStatus().getCurrentPosition(), hasKey("date.date_histogram"));
|
||||||
|
assertTrue(job.getStatus().getUpgradedDocumentId());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testGetMissingRollupJob() throws Exception {
|
||||||
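// Getting a job id that does not exist returns an empty jobs list rather than an error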
|
GetRollupJobRequest getRollupJobRequest = new GetRollupJobRequest("missing");
|
||||||
|
RollupClient rollupClient = highLevelClient().rollup();
|
||||||
|
GetRollupJobResponse getResponse = execute(getRollupJobRequest, rollupClient::getRollupJob, rollupClient::getRollupJobAsync);
|
||||||
|
assertThat(getResponse.getJobs(), empty());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,61 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.client;
|
||||||
|
|
||||||
|
import org.apache.http.client.methods.HttpGet;
|
||||||
|
import org.apache.http.client.methods.HttpPut;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||||
|
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||||
|
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||||
|
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
|
||||||
|
import static org.hamcrest.Matchers.equalTo;
|
||||||
|
import static org.hamcrest.Matchers.empty;
|
||||||
|
import static org.hamcrest.Matchers.nullValue;
|
||||||
|
|
||||||
|
public class RollupRequestConvertersTests extends ESTestCase {
|
||||||
|
public void testPutJob() throws IOException {
|
||||||
|
String job = randomAlphaOfLength(5);
|
||||||
|
|
||||||
|
RollupJobConfig config = RollupJobConfigTests.randomRollupJobConfig(job);
|
||||||
|
PutRollupJobRequest put = new PutRollupJobRequest(config);
|
||||||
|
|
||||||
|
Request request = RollupRequestConverters.putJob(put);
|
||||||
|
assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + job));
|
||||||
|
assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod()));
|
||||||
|
assertThat(request.getParameters().keySet(), empty());
|
||||||
|
RequestConvertersTests.assertToXContentBody(put, request.getEntity());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testGetJob() {
|
||||||
|
boolean getAll = randomBoolean();
|
||||||
|
String job = getAll ? "_all" : RequestConvertersTests.randomIndicesNames(1, 1)[0];
|
||||||
|
GetRollupJobRequest get = getAll ? new GetRollupJobRequest() : new GetRollupJobRequest(job);
|
||||||
|
|
||||||
|
Request request = RollupRequestConverters.getJob(get);
|
||||||
|
assertThat(request.getEndpoint(), equalTo("/_xpack/rollup/job/" + job));
|
||||||
|
assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod()));
|
||||||
|
assertThat(request.getParameters().keySet(), empty());
|
||||||
|
assertThat(request.getEntity(), nullValue());
|
||||||
|
}
|
||||||
|
}
|
@ -19,9 +19,11 @@
|
|||||||
|
|
||||||
package org.elasticsearch.client;
|
package org.elasticsearch.client;
|
||||||
|
|
||||||
|
import org.apache.http.client.methods.HttpPost;
|
||||||
import org.apache.http.client.methods.HttpPut;
|
import org.apache.http.client.methods.HttpPut;
|
||||||
import org.elasticsearch.client.security.DisableUserRequest;
|
import org.elasticsearch.client.security.DisableUserRequest;
|
||||||
import org.elasticsearch.client.security.EnableUserRequest;
|
import org.elasticsearch.client.security.EnableUserRequest;
|
||||||
|
import org.elasticsearch.client.security.ChangePasswordRequest;
|
||||||
import org.elasticsearch.client.security.PutUserRequest;
|
import org.elasticsearch.client.security.PutUserRequest;
|
||||||
import org.elasticsearch.client.security.RefreshPolicy;
|
import org.elasticsearch.client.security.RefreshPolicy;
|
||||||
import org.elasticsearch.test.ESTestCase;
|
import org.elasticsearch.test.ESTestCase;
|
||||||
@ -91,9 +93,34 @@ public class SecurityRequestConvertersTests extends ESTestCase {
|
|||||||
|
|
||||||
private static Map<String, String> getExpectedParamsFromRefreshPolicy(RefreshPolicy refreshPolicy) {
|
private static Map<String, String> getExpectedParamsFromRefreshPolicy(RefreshPolicy refreshPolicy) {
|
||||||
if (refreshPolicy != RefreshPolicy.NONE) {
|
if (refreshPolicy != RefreshPolicy.NONE) {
|
||||||
return Collections.singletonMap("refresh", refreshPolicy.getValue());
|
return Collections.singletonMap("refresh", refreshPolicy.getValue());
|
||||||
} else {
|
} else {
|
||||||
return Collections.emptyMap();
|
return Collections.emptyMap();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testChangePassword() throws IOException {
|
||||||
|
final String username = randomAlphaOfLengthBetween(4, 12);
|
||||||
|
final char[] password = randomAlphaOfLengthBetween(8, 12).toCharArray();
|
||||||
|
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
|
||||||
|
final Map<String, String> expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy);
|
||||||
|
ChangePasswordRequest changePasswordRequest = new ChangePasswordRequest(username, password, refreshPolicy);
|
||||||
|
Request request = SecurityRequestConverters.changePassword(changePasswordRequest);
|
||||||
|
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||||
|
assertEquals("/_xpack/security/user/" + changePasswordRequest.getUsername() + "/_password", request.getEndpoint());
|
||||||
|
assertEquals(expectedParams, request.getParameters());
|
||||||
|
assertToXContentBody(changePasswordRequest, request.getEntity());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testSelfChangePassword() throws IOException {
|
||||||
|
final char[] password = randomAlphaOfLengthBetween(8, 12).toCharArray();
|
||||||
|
final RefreshPolicy refreshPolicy = randomFrom(RefreshPolicy.values());
|
||||||
|
final Map<String, String> expectedParams = getExpectedParamsFromRefreshPolicy(refreshPolicy);
|
||||||
|
ChangePasswordRequest changePasswordRequest = new ChangePasswordRequest(null, password, refreshPolicy);
|
||||||
|
Request request = SecurityRequestConverters.changePassword(changePasswordRequest);
|
||||||
|
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
|
||||||
|
assertEquals("/_xpack/security/user/_password", request.getEndpoint());
|
||||||
|
assertEquals(expectedParams, request.getParameters());
|
||||||
|
assertToXContentBody(changePasswordRequest, request.getEntity());
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -27,8 +27,15 @@ import org.elasticsearch.action.bulk.BulkResponse;
|
|||||||
import org.elasticsearch.action.index.IndexRequest;
|
import org.elasticsearch.action.index.IndexRequest;
|
||||||
import org.elasticsearch.action.support.WriteRequest;
|
import org.elasticsearch.action.support.WriteRequest;
|
||||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||||
|
import org.elasticsearch.client.Request;
|
||||||
import org.elasticsearch.client.RequestOptions;
|
import org.elasticsearch.client.RequestOptions;
|
||||||
|
import org.elasticsearch.client.Response;
|
||||||
import org.elasticsearch.client.RestHighLevelClient;
|
import org.elasticsearch.client.RestHighLevelClient;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobRequest;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
|
||||||
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
import org.elasticsearch.client.rollup.PutRollupJobRequest;
|
||||||
import org.elasticsearch.client.rollup.PutRollupJobResponse;
|
import org.elasticsearch.client.rollup.PutRollupJobResponse;
|
||||||
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
|
import org.elasticsearch.client.rollup.job.config.DateHistogramGroupConfig;
|
||||||
@ -38,19 +45,26 @@ import org.elasticsearch.client.rollup.job.config.MetricConfig;
|
|||||||
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
import org.elasticsearch.client.rollup.job.config.RollupJobConfig;
|
||||||
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
|
import org.elasticsearch.client.rollup.job.config.TermsGroupConfig;
|
||||||
import org.elasticsearch.common.unit.TimeValue;
|
import org.elasticsearch.common.unit.TimeValue;
|
||||||
|
import org.elasticsearch.common.xcontent.support.XContentMapValues;
|
||||||
import org.elasticsearch.rest.RestStatus;
|
import org.elasticsearch.rest.RestStatus;
|
||||||
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
|
import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
|
||||||
|
import org.junit.After;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
|
|
||||||
|
import java.io.BufferedReader;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
import java.io.InputStreamReader;
|
||||||
|
import java.nio.charset.StandardCharsets;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
import java.util.Locale;
|
||||||
|
import java.util.Map;
|
||||||
import java.util.concurrent.CountDownLatch;
|
import java.util.concurrent.CountDownLatch;
|
||||||
import java.util.concurrent.TimeUnit;
|
import java.util.concurrent.TimeUnit;
|
||||||
|
|
||||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||||
|
import static org.hamcrest.Matchers.hasSize;
|
||||||
|
|
||||||
public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
||||||
|
|
||||||
@ -160,4 +174,110 @@ public class RollupDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testGetRollupJob() throws Exception {
|
||||||
|
testCreateRollupJob();
|
||||||
|
RestHighLevelClient client = highLevelClient();
|
||||||
|
|
||||||
|
|
||||||
|
// tag::x-pack-rollup-get-rollup-job-request
|
||||||
|
GetRollupJobRequest getAll = new GetRollupJobRequest(); // <1>
|
||||||
|
GetRollupJobRequest getJob = new GetRollupJobRequest("job_1"); // <2>
|
||||||
|
// end::x-pack-rollup-get-rollup-job-request
|
||||||
|
|
||||||
|
// tag::x-pack-rollup-get-rollup-job-execute
|
||||||
|
GetRollupJobResponse response = client.rollup().getRollupJob(getJob, RequestOptions.DEFAULT);
|
||||||
|
// end::x-pack-rollup-get-rollup-job-execute
|
||||||
|
|
||||||
|
// tag::x-pack-rollup-get-rollup-job-response
|
||||||
|
assertThat(response.getJobs(), hasSize(1));
|
||||||
|
JobWrapper job = response.getJobs().get(0); // <1>
|
||||||
|
RollupJobConfig config = job.getJob();
|
||||||
|
RollupJobStatus status = job.getStatus();
|
||||||
|
RollupIndexerJobStats stats = job.getStats();
|
||||||
|
// end::x-pack-rollup-get-rollup-job-response
|
||||||
|
assertNotNull(config);
|
||||||
|
assertNotNull(status);
|
||||||
|
assertNotNull(stats);
|
||||||
|
|
||||||
|
// tag::x-pack-rollup-get-rollup-job-execute-listener
|
||||||
|
ActionListener<GetRollupJobResponse> listener = new ActionListener<GetRollupJobResponse>() {
|
||||||
|
@Override
|
||||||
|
public void onResponse(GetRollupJobResponse response) {
|
||||||
|
// <1>
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void onFailure(Exception e) {
|
||||||
|
// <2>
|
||||||
|
}
|
||||||
|
};
|
||||||
|
// end::x-pack-rollup-get-rollup-job-execute-listener
|
||||||
|
|
||||||
|
// Replace the empty listener by a blocking listener in test
|
||||||
|
final CountDownLatch latch = new CountDownLatch(1);
|
||||||
|
listener = new LatchedActionListener<>(listener, latch);
|
||||||
|
|
||||||
|
// tag::x-pack-rollup-get-rollup-job-execute-async
|
||||||
|
client.rollup().getRollupJobAsync(getJob, RequestOptions.DEFAULT, listener); // <1>
|
||||||
|
// end::x-pack-rollup-get-rollup-job-execute-async
|
||||||
|
|
||||||
|
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||||
|
}
|
||||||
|
|
||||||
|
@After
|
||||||
|
public void wipeRollup() throws Exception {
|
||||||
|
// TODO move this to ESRestTestCase
|
||||||
|
deleteRollupJobs();
|
||||||
|
waitForPendingRollupTasks();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void deleteRollupJobs() throws Exception {
|
||||||
|
Response response = adminClient().performRequest(new Request("GET", "/_xpack/rollup/job/_all"));
|
||||||
|
Map<String, Object> jobs = entityAsMap(response);
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
List<Map<String, Object>> jobConfigs =
|
||||||
|
(List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobs);
|
||||||
|
|
||||||
|
if (jobConfigs == null) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (Map<String, Object> jobConfig : jobConfigs) {
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
|
||||||
|
Request request = new Request("DELETE", "/_xpack/rollup/job/" + jobId);
|
||||||
|
request.addParameter("ignore", "404"); // Ignore 404s because they imply someone was racing us to delete this
|
||||||
|
adminClient().performRequest(request);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void waitForPendingRollupTasks() throws Exception {
|
||||||
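// Poll the task list until no rollup tasks remain, converting transient IO errors into retries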
|
assertBusy(() -> {
|
||||||
|
try {
|
||||||
|
Request request = new Request("GET", "/_cat/tasks");
|
||||||
|
request.addParameter("detailed", "true");
|
||||||
|
Response response = adminClient().performRequest(request);
|
||||||
|
|
||||||
|
try (BufferedReader responseReader = new BufferedReader(
|
||||||
|
new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
|
||||||
|
int activeTasks = 0;
|
||||||
|
String line;
|
||||||
|
StringBuilder tasksListString = new StringBuilder();
|
||||||
|
while ((line = responseReader.readLine()) != null) {
|
||||||
|
|
||||||
|
// We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks
|
||||||
|
if (line.startsWith("xpack/rollup/job")) {
|
||||||
|
activeTasks++;
|
||||||
|
tasksListString.append(line).append('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks);
|
||||||
|
}
|
||||||
|
} catch (IOException e) {
|
||||||
|
// Throw an assertion error so we retry
|
||||||
|
throw new AssertionError("Error getting active tasks list", e);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -24,6 +24,7 @@ import org.elasticsearch.action.LatchedActionListener;
|
|||||||
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
|
||||||
import org.elasticsearch.client.RequestOptions;
|
import org.elasticsearch.client.RequestOptions;
|
||||||
import org.elasticsearch.client.RestHighLevelClient;
|
import org.elasticsearch.client.RestHighLevelClient;
|
||||||
|
import org.elasticsearch.client.security.ChangePasswordRequest;
|
||||||
import org.elasticsearch.client.security.DisableUserRequest;
|
import org.elasticsearch.client.security.DisableUserRequest;
|
||||||
import org.elasticsearch.client.security.EnableUserRequest;
|
import org.elasticsearch.client.security.EnableUserRequest;
|
||||||
import org.elasticsearch.client.security.PutUserRequest;
|
import org.elasticsearch.client.security.PutUserRequest;
|
||||||
@ -42,7 +43,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||||||
|
|
||||||
{
|
{
|
||||||
//tag::put-user-execute
|
//tag::put-user-execute
|
||||||
char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' };
|
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||||
PutUserRequest request =
|
PutUserRequest request =
|
||||||
new PutUserRequest("example", password, Collections.singletonList("superuser"), null, null, true, null, RefreshPolicy.NONE);
|
new PutUserRequest("example", password, Collections.singletonList("superuser"), null, null, true, null, RefreshPolicy.NONE);
|
||||||
PutUserResponse response = client.security().putUser(request, RequestOptions.DEFAULT);
|
PutUserResponse response = client.security().putUser(request, RequestOptions.DEFAULT);
|
||||||
@ -56,7 +57,7 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||||||
}
|
}
|
||||||
|
|
||||||
{
|
{
|
||||||
char[] password = new char[] { 'p', 'a', 's', 's', 'w', 'o', 'r', 'd' };
|
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||||
PutUserRequest request = new PutUserRequest("example2", password, Collections.singletonList("superuser"), null, null, true,
|
PutUserRequest request = new PutUserRequest("example2", password, Collections.singletonList("superuser"), null, null, true,
|
||||||
null, RefreshPolicy.NONE);
|
null, RefreshPolicy.NONE);
|
||||||
// tag::put-user-execute-listener
|
// tag::put-user-execute-listener
|
||||||
@ -173,4 +174,48 @@ public class SecurityDocumentationIT extends ESRestHighLevelClientTestCase {
|
|||||||
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testChangePassword() throws Exception {
|
||||||
|
RestHighLevelClient client = highLevelClient();
|
||||||
|
char[] password = new char[]{'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||||
|
char[] newPassword = new char[]{'n', 'e', 'w', 'p', 'a', 's', 's', 'w', 'o', 'r', 'd'};
|
||||||
|
PutUserRequest putUserRequest = new PutUserRequest("change_password_user", password, Collections.singletonList("superuser"),
|
||||||
|
null, null, true, null, RefreshPolicy.NONE);
|
||||||
|
PutUserResponse putUserResponse = client.security().putUser(putUserRequest, RequestOptions.DEFAULT);
|
||||||
|
assertTrue(putUserResponse.isCreated());
|
||||||
|
{
|
||||||
|
//tag::change-password-execute
|
||||||
|
ChangePasswordRequest request = new ChangePasswordRequest("change_password_user", newPassword, RefreshPolicy.NONE);
|
||||||
|
EmptyResponse response = client.security().changePassword(request, RequestOptions.DEFAULT);
|
||||||
|
//end::change-password-execute
|
||||||
|
|
||||||
|
assertNotNull(response);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
//tag::change-password-execute-listener
|
||||||
|
ChangePasswordRequest request = new ChangePasswordRequest("change_password_user", password, RefreshPolicy.NONE);
|
||||||
|
ActionListener<EmptyResponse> listener = new ActionListener<EmptyResponse>() {
|
||||||
|
@Override
|
||||||
|
public void onResponse(EmptyResponse emptyResponse) {
|
||||||
|
// <1>
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void onFailure(Exception e) {
|
||||||
|
// <2>
|
||||||
|
}
|
||||||
|
};
|
||||||
|
//end::change-password-execute-listener
|
||||||
|
|
||||||
|
// Replace the empty listener by a blocking listener in test
|
||||||
|
final CountDownLatch latch = new CountDownLatch(1);
|
||||||
|
listener = new LatchedActionListener<>(listener, latch);
|
||||||
|
|
||||||
|
//tag::change-password-execute-async
|
||||||
|
client.security().changePasswordAsync(request, RequestOptions.DEFAULT, listener); // <1>
|
||||||
|
//end::change-password-execute-async
|
||||||
|
|
||||||
|
assertTrue(latch.await(30L, TimeUnit.SECONDS));
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,33 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
package org.elasticsearch.client.rollup;
|
||||||
|
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
|
public class GetRollupJobRequestTests extends ESTestCase {
|
||||||
|
public void testRequiresJob() {
|
||||||
|
final NullPointerException e = expectThrows(NullPointerException.class, () -> new GetRollupJobRequest(null));
|
||||||
|
assertEquals("jobId is required", e.getMessage());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testDoNotUseAll() {
|
||||||
|
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new GetRollupJobRequest("_all"));
|
||||||
|
assertEquals("use the default ctor to ask for all jobs", e.getMessage());
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,120 @@
|
|||||||
|
/*
|
||||||
|
* Licensed to Elasticsearch under one or more contributor
|
||||||
|
* license agreements. See the NOTICE file distributed with
|
||||||
|
* this work for additional information regarding copyright
|
||||||
|
* ownership. Elasticsearch licenses this file to you under
|
||||||
|
* the Apache License, Version 2.0 (the "License"); you may
|
||||||
|
* not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing,
|
||||||
|
* software distributed under the License is distributed on an
|
||||||
|
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||||
|
* KIND, either express or implied. See the License for the
|
||||||
|
* specific language governing permissions and limitations
|
||||||
|
* under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.elasticsearch.client.rollup;
|
||||||
|
|
||||||
|
import org.elasticsearch.common.xcontent.ToXContent;
|
||||||
|
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse.IndexerState;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse.JobWrapper;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupIndexerJobStats;
|
||||||
|
import org.elasticsearch.client.rollup.GetRollupJobResponse.RollupJobStatus;
|
||||||
|
import org.elasticsearch.client.rollup.job.config.RollupJobConfigTests;
|
||||||
|
import org.elasticsearch.test.ESTestCase;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.HashMap;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.Map;
|
||||||
|
|
||||||
|
import static org.elasticsearch.test.AbstractXContentTestCase.xContentTester;
|
||||||
|
|
||||||
|
public class GetRollupJobResponseTests extends ESTestCase {
|
||||||
|
public void testFromXContent() throws IOException {
|
||||||
|
xContentTester(
|
||||||
|
this::createParser,
|
||||||
|
this::createTestInstance,
|
||||||
|
this::toXContent,
|
||||||
|
GetRollupJobResponse::fromXContent)
|
||||||
|
.supportsUnknownFields(true)
|
||||||
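// current_position is a free-form map, so randomly injected fields there are indistinguishable from real entries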
|
.randomFieldsExcludeFilter(field ->
|
||||||
|
field.endsWith("status.current_position"))
|
||||||
|
.test();
|
||||||
|
}
|
||||||
|
|
||||||
|
private GetRollupJobResponse createTestInstance() {
|
||||||
|
int jobCount = between(1, 5);
|
||||||
|
List<JobWrapper> jobs = new ArrayList<>();
|
||||||
|
for (int j = 0; j < jobCount; j++) {
|
||||||
|
jobs.add(new JobWrapper(
|
||||||
|
RollupJobConfigTests.randomRollupJobConfig(randomAlphaOfLength(5)),
|
||||||
|
randomStats(),
|
||||||
|
randomStatus()));
|
||||||
|
}
|
||||||
|
return new GetRollupJobResponse(jobs);
|
||||||
|
}
|
||||||
|
|
||||||
|
private RollupIndexerJobStats randomStats() {
|
||||||
|
return new RollupIndexerJobStats(randomLong(), randomLong(), randomLong(), randomLong());
|
||||||
|
}
|
||||||
|
|
||||||
|
private RollupJobStatus randomStatus() {
|
||||||
|
Map<String, Object> currentPosition = new HashMap<>();
|
||||||
|
int positions = between(0, 10);
|
||||||
|
while (currentPosition.size() < positions) {
|
||||||
|
currentPosition.put(randomAlphaOfLength(2), randomAlphaOfLength(2));
|
||||||
|
}
|
||||||
|
return new RollupJobStatus(
|
||||||
|
randomFrom(IndexerState.values()),
|
||||||
|
currentPosition,
|
||||||
|
randomBoolean());
|
||||||
|
}
|
||||||
|
|
||||||
|
private void toXContent(GetRollupJobResponse response, XContentBuilder builder) throws IOException {
|
||||||
|
ToXContent.Params params = ToXContent.EMPTY_PARAMS;
|
||||||
|
builder.startObject();
|
||||||
|
builder.startArray(GetRollupJobResponse.JOBS.getPreferredName());
|
||||||
|
for (JobWrapper job : response.getJobs()) {
|
||||||
|
toXContent(job, builder, params);
|
||||||
|
}
|
||||||
|
builder.endArray();
|
||||||
|
builder.endObject();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void toXContent(JobWrapper jobWrapper, XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||||
|
builder.startObject();
|
||||||
|
builder.field(GetRollupJobResponse.CONFIG.getPreferredName());
|
||||||
|
jobWrapper.getJob().toXContent(builder, params);
|
||||||
|
builder.field(GetRollupJobResponse.STATUS.getPreferredName());
|
||||||
|
toXContent(jobWrapper.getStatus(), builder, params);
|
||||||
|
builder.field(GetRollupJobResponse.STATS.getPreferredName());
|
||||||
|
toXContent(jobWrapper.getStats(), builder, params);
|
||||||
|
builder.endObject();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void toXContent(RollupJobStatus status, XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||||
|
builder.startObject();
|
||||||
|
builder.field(GetRollupJobResponse.STATE.getPreferredName(), status.getState().value());
|
||||||
|
if (status.getCurrentPosition() != null) {
|
||||||
|
builder.field(GetRollupJobResponse.CURRENT_POSITION.getPreferredName(), status.getCurrentPosition());
|
||||||
|
}
|
||||||
|
builder.field(GetRollupJobResponse.UPGRADED_DOC_ID.getPreferredName(), status.getUpgradedDocumentId());
|
||||||
|
builder.endObject();
|
||||||
|
}
|
||||||
|
|
||||||
|
public void toXContent(RollupIndexerJobStats stats, XContentBuilder builder, ToXContent.Params params) throws IOException {
|
||||||
|
builder.startObject();
|
||||||
|
builder.field(GetRollupJobResponse.NUM_PAGES.getPreferredName(), stats.getNumPages());
|
||||||
|
builder.field(GetRollupJobResponse.NUM_INPUT_DOCUMENTS.getPreferredName(), stats.getNumDocuments());
|
||||||
|
builder.field(GetRollupJobResponse.NUM_OUTPUT_DOCUMENTS.getPreferredName(), stats.getOutputDocuments());
|
||||||
|
builder.field(GetRollupJobResponse.NUM_INVOCATIONS.getPreferredName(), stats.getNumInvocations());
|
||||||
|
builder.endObject();
|
||||||
|
}
|
||||||
|
}
|
Binary file not shown.
71
docs/java-rest/high-level/rollup/get_job.asciidoc
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
[[java-rest-high-x-pack-rollup-get-job]]
|
||||||
|
=== Get Rollup Job API
|
||||||
|
|
||||||
|
The Get Rollup Job API can be used to get one or all rollup jobs from the
|
||||||
|
cluster. It accepts a `GetRollupJobRequest` object as a request and returns
|
||||||
|
a `GetRollupJobResponse`.
|
||||||
|
|
||||||
|
[[java-rest-high-x-pack-rollup-get-rollup-job-request]]
|
||||||
|
==== Get Rollup Job Request
|
||||||
|
|
||||||
|
A `GetRollupJobRequest` can be built without any parameters to get all of the
|
||||||
|
rollup jobs or with a job name to get a single job:
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-request]
|
||||||
|
--------------------------------------------------
|
||||||
|
<1> Gets all jobs.
|
||||||
|
<2> Gets `job_1`.
|
||||||
|
|
||||||
|
[[java-rest-high-x-pack-rollup-get-rollup-job-execution]]
|
||||||
|
==== Execution
|
||||||
|
|
||||||
|
The Get Rollup Job API can be executed through a `RollupClient`
|
||||||
|
instance. Such an instance can be retrieved from a `RestHighLevelClient`
|
||||||
|
using the `rollup()` method:
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute]
|
||||||
|
--------------------------------------------------
|
||||||
|
|
||||||
|
[[java-rest-high-x-pack-rollup-get-rollup-job-response]]
|
||||||
|
==== Response
|
||||||
|
|
||||||
|
The returned `GetRollupJobResponse` includes a `JobWrapper` per returned job
|
||||||
|
which contains the configuration of the job, the job's current status, and
|
||||||
|
statistics about the job's past execution.
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-response]
|
||||||
|
--------------------------------------------------
|
||||||
|
<1> We only asked for a single job.
|
||||||
|
|
||||||
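
When all jobs were requested, the wrappers in the response can simply be
iterated. The following is a minimal sketch, assuming the `getAll` request
from above was executed with the same `client`:

["source","java"]
--------------------------------------------------
GetRollupJobResponse all = client.rollup().getRollupJob(getAll, RequestOptions.DEFAULT);
for (JobWrapper wrapper : all.getJobs()) {
    String jobId = wrapper.getJob().getId();             // the job's configuration
    IndexerState state = wrapper.getStatus().getState(); // its current status
    long pages = wrapper.getStats().getNumPages();       // its execution statistics
}
--------------------------------------------------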
|
[[java-rest-high-x-pack-rollup-get-rollup-job-async]]
|
||||||
|
==== Asynchronous Execution
|
||||||
|
|
||||||
|
This request can be executed asynchronously:
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute-async]
|
||||||
|
--------------------------------------------------
|
||||||
|
<1> The `GetRollupJobRequest` to execute and the `ActionListener` to use when
|
||||||
|
the execution completes.
|
||||||
|
|
||||||
|
The asynchronous method does not block and returns immediately. Once the request
|
||||||
|
has completed the `ActionListener` is called back using the `onResponse` method
|
||||||
|
if the execution successfully completed or using the `onFailure` method if
|
||||||
|
it failed.
|
||||||
|
|
||||||
|
A typical listener for `GetRollupJobResponse` looks like:
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/RollupDocumentationIT.java[x-pack-rollup-get-rollup-job-execute-listener]
|
||||||
|
--------------------------------------------------
|
||||||
|
<1> Called when the execution is successfully completed. The response is
|
||||||
|
provided as an argument.
|
||||||
|
<2> Called in case of failure. The raised exception is provided as an argument.
|
46
docs/java-rest/high-level/security/change-password.asciidoc
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
[[java-rest-high-security-change-password]]
|
||||||
|
=== Change Password API
|
||||||
|
|
||||||
|
[[java-rest-high-security-change-password-execution]]
|
||||||
|
==== Execution
|
||||||
|
|
||||||
|
A user's password can be changed using the `security().changePassword()`
|
||||||
|
method:
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/SecurityDocumentationIT.java[change-password-execute]
|
||||||
|
--------------------------------------------------
|
||||||
|
|
||||||
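
Spelled out, a minimal call might look like the following sketch (the user
name and the `client` instance here are assumptions for illustration):

["source","java"]
--------------------------------------------------
char[] newPassword = "new-password".toCharArray();
ChangePasswordRequest request =
    new ChangePasswordRequest("target_user", newPassword, RefreshPolicy.NONE);
EmptyResponse response = client.security().changePassword(request, RequestOptions.DEFAULT);
--------------------------------------------------

Passing `null` instead of a user name changes the password of the currently
authenticated user.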
|
[[java-rest-high-change-password-response]]
|
||||||
|
==== Response
|
||||||
|
|
||||||
|
The returned `EmptyResponse` does not contain any fields. Receiving this
|
||||||
|
response indicates that the request succeeded.
|
||||||
|
|
||||||
|
[[java-rest-high-x-pack-security-change-password-async]]
|
||||||
|
==== Asynchronous Execution
|
||||||
|
|
||||||
|
This request can be executed asynchronously:
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/SecurityDocumentationIT.java[change-password-execute-async]
|
||||||
|
--------------------------------------------------
|
||||||
|
<1> The `ChangePasswordRequest` to execute and the `ActionListener` to use when
|
||||||
|
the execution completes.
|
||||||
|
|
||||||
|
The asynchronous method does not block and returns immediately. Once the request
|
||||||
|
has completed the `ActionListener` is called back using the `onResponse` method
|
||||||
|
if the execution successfully completed or using the `onFailure` method if
|
||||||
|
it failed.
|
||||||
|
|
||||||
|
A typical listener for an `EmptyResponse` looks like:
|
||||||
|
|
||||||
|
["source","java",subs="attributes,callouts,macros"]
|
||||||
|
--------------------------------------------------
|
||||||
|
include-tagged::{doc-tests}/SecurityDocumentationIT.java[change-password-execute-listener]
|
||||||
|
--------------------------------------------------
|
||||||
|
<1> Called when the execution is successfully completed. The response is
|
||||||
|
provided as an argument.
|
||||||
|
<2> Called in case of failure. The raised exception is provided as an argument.
|
@ -285,8 +285,10 @@ include::migration/get-assistance.asciidoc[]
|
|||||||
The Java High Level REST Client supports the following Rollup APIs:
|
The Java High Level REST Client supports the following Rollup APIs:
|
||||||
|
|
||||||
* <<java-rest-high-x-pack-rollup-put-job>>
|
* <<java-rest-high-x-pack-rollup-put-job>>
|
||||||
|
* <<java-rest-high-x-pack-rollup-get-job>>
|
||||||
|
|
||||||
include::rollup/put_job.asciidoc[]
|
include::rollup/put_job.asciidoc[]
|
||||||
|
include::rollup/get_job.asciidoc[]
|
||||||
|
|
||||||
== Security APIs
|
== Security APIs
|
||||||
|
|
||||||
@ -295,10 +297,12 @@ The Java High Level REST Client supports the following Security APIs:
|
|||||||
* <<java-rest-high-security-put-user>>
|
* <<java-rest-high-security-put-user>>
|
||||||
* <<java-rest-high-security-enable-user>>
|
* <<java-rest-high-security-enable-user>>
|
||||||
* <<java-rest-high-security-disable-user>>
|
* <<java-rest-high-security-disable-user>>
|
||||||
|
* <<java-rest-high-security-change-password>>
|
||||||
|
|
||||||
include::security/put-user.asciidoc[]
|
include::security/put-user.asciidoc[]
|
||||||
include::security/enable-user.asciidoc[]
|
include::security/enable-user.asciidoc[]
|
||||||
include::security/disable-user.asciidoc[]
|
include::security/disable-user.asciidoc[]
|
||||||
|
include::security/change-password.asciidoc[]
|
||||||
|
|
||||||
== Watcher APIs
|
== Watcher APIs
|
||||||
|
|
||||||
|
@ -56,8 +56,8 @@ releases 2.0 and later do not support rivers.
|
|||||||
* https://github.com/jprante/elasticsearch-jdbc[JDBC importer]:
|
* https://github.com/jprante/elasticsearch-jdbc[JDBC importer]:
|
||||||
The Java Database Connectivity (JDBC) importer allows you to fetch data from JDBC sources for indexing into Elasticsearch (by Jörg Prante)
|
The Java Database Connectivity (JDBC) importer allows you to fetch data from JDBC sources for indexing into Elasticsearch (by Jörg Prante)
|
||||||
|
|
||||||
* https://github.com/reachkrishnaraj/kafka-elasticsearch-standalone-consumer/tree/branch2.0[Kafka Standalone Consumer(Indexer)]:
|
* https://github.com/BigDataDevs/kafka-elasticsearch-consumer[Kafka Standalone Consumer(Indexer)]:
|
||||||
Kafka Standalone Consumer [Indexer] will read messages from Kafka in batches, processes(as implemented) and bulk-indexes them into Elasticsearch. Flexible and scalable. More documentation in above GitHub repo's Wiki.(Please use branch 2.0)!
|
Kafka Standalone Consumer [Indexer] reads messages from Kafka in batches, processes them (as implemented), and bulk-indexes them into Elasticsearch. Flexible and scalable. More documentation is available in the above GitHub repo's Wiki.
|
||||||
|
|
||||||
* https://github.com/ozlerhakan/mongolastic[Mongolastic]:
|
* https://github.com/ozlerhakan/mongolastic[Mongolastic]:
|
||||||
A tool that clones data from Elasticsearch to MongoDB and vice versa
|
A tool that clones data from Elasticsearch to MongoDB and vice versa
|
||||||
|
@ -615,7 +615,7 @@ GET /_search
|
|||||||
"aggs" : {
|
"aggs" : {
|
||||||
"genres" : {
|
"genres" : {
|
||||||
"terms" : {
|
"terms" : {
|
||||||
"field" : "gender",
|
"field" : "genre",
|
||||||
"script" : {
|
"script" : {
|
||||||
"source" : "'Genre: ' +_value",
|
"source" : "'Genre: ' +_value",
|
||||||
"lang" : "painless"
|
"lang" : "painless"
|
||||||
|
@ -16,6 +16,7 @@ GET _tasks?nodes=nodeId1,nodeId2 <2>
|
|||||||
GET _tasks?nodes=nodeId1,nodeId2&actions=cluster:* <3>
|
GET _tasks?nodes=nodeId1,nodeId2&actions=cluster:* <3>
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
// TEST[skip:No tasks to retrieve]
|
||||||
|
|
||||||
<1> Retrieves all tasks currently running on all nodes in the cluster.
|
<1> Retrieves all tasks currently running on all nodes in the cluster.
|
||||||
<2> Retrieves all tasks running on nodes `nodeId1` and `nodeId2`. See <<cluster-nodes>> for more info about how to select individual nodes.
|
<2> Retrieves all tasks running on nodes `nodeId1` and `nodeId2`. See <<cluster-nodes>> for more info about how to select individual nodes.
|
||||||
@ -57,31 +58,29 @@ The result will look similar to the following:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// NOTCONSOLE
|
// TESTRESPONSE
|
||||||
// We can't test tasks output
|
|
||||||
|
|
||||||
It is also possible to retrieve information for a particular task:
|
It is also possible to retrieve information for a particular task. The following
|
||||||
|
example retrieves information about task `oTUltX4IQMOUUVeiohTt8A:124`:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
GET _tasks/task_id <1>
|
GET _tasks/oTUltX4IQMOUUVeiohTt8A:124
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
// TEST[catch:missing]
|
// TEST[catch:missing]
|
||||||
|
|
||||||
<1> This will return a 404 if the task isn't found.
|
If the task isn't found, the API returns a 404.
|
||||||
|
|
||||||
Or to retrieve all children of a particular task:
|
To retrieve all children of a particular task:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
GET _tasks?parent_task_id=parent_task_id <1>
|
GET _tasks?parent_task_id=oTUltX4IQMOUUVeiohTt8A:123
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/=parent_task_id/=node_id:1/]
|
|
||||||
|
|
||||||
<1> This won't return a 404 if the parent isn't found.
|
If the parent isn't found, the API does not return a 404.
|
||||||
|
|
||||||
You can also use the `detailed` request parameter to get more information about
|
You can also use the `detailed` request parameter to get more information about
|
||||||
the running tasks. This is useful for telling one task from another but is more
|
the running tasks. This is useful for telling one task from another but is more
|
||||||
@ -93,8 +92,9 @@ request parameter:
|
|||||||
GET _tasks?actions=*search&detailed
|
GET _tasks?actions=*search&detailed
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
// TEST[skip:No tasks to retrieve]
|
||||||
|
|
||||||
might look like:
|
The results might look like:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
@ -121,8 +121,7 @@ might look like:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// NOTCONSOLE
|
// TESTRESPONSE
|
||||||
// We can't test tasks output
|
|
||||||
|
|
||||||
The new `description` field contains human readable text that identifies the
|
The new `description` field contains human readable text that identifies the
|
||||||
particular request that the task is performing such as identifying the search
|
particular request that the task is performing such as identifying the search
|
||||||
@ -167,14 +166,14 @@ GET _cat/tasks?detailed
|
|||||||
[[task-cancellation]]
|
[[task-cancellation]]
|
||||||
=== Task Cancellation
|
=== Task Cancellation
|
||||||
|
|
||||||
If a long-running task supports cancellation, it can be cancelled by the following command:
|
If a long-running task supports cancellation, it can be cancelled with the cancel
|
||||||
|
tasks API. The following example cancels task `oTUltX4IQMOUUVeiohTt8A:12345`:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
POST _tasks/node_id:task_id/_cancel
|
POST _tasks/oTUltX4IQMOUUVeiohTt8A:12345/_cancel
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/task_id/1/]
|
|
||||||
|
|
||||||
The task cancellation command supports the same task selection parameters as the list tasks command, so multiple tasks
|
The task cancellation command supports the same task selection parameters as the list tasks command, so multiple tasks
|
||||||
can be cancelled at the same time. For example, the following command will cancel all reindex tasks running on the
|
can be cancelled at the same time. For example, the following command will cancel all reindex tasks running on the
|
||||||
@ -217,7 +216,7 @@ a the client that started them:
|
|||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
curl -i -H "X-Opaque-Id: 123456" "http://localhost:9200/_tasks?group_by=parents"
|
curl -i -H "X-Opaque-Id: 123456" "http://localhost:9200/_tasks?group_by=parents"
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// NOTCONSOLE
|
// NOTCONSOLE
|
||||||
|
|
||||||
The result will look similar to the following:
|
The result will look similar to the following:
|
||||||
|
|
||||||
@ -260,8 +259,7 @@ content-length: 831
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// NOTCONSOLE
|
// NOTCONSOLE
|
||||||
|
|
||||||
<1> id as a part of the response header
|
<1> id as a part of the response header
|
||||||
<2> id for the task that was initiated by the REST request
|
<2> id for the task that was initiated by the REST request
|
||||||
<3> the child task of the task initiated by the REST request
|
<3> the child task of the task initiated by the REST request
|
||||||
|
@ -304,6 +304,7 @@ You can fetch the status of any running delete-by-query requests with the
|
|||||||
GET _tasks?detailed=true&actions=*/delete/byquery
|
GET _tasks?detailed=true&actions=*/delete/byquery
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
// TEST[skip:No tasks to retrieve]
|
||||||
|
|
||||||
The responses looks like:
|
The responses looks like:
|
||||||
|
|
||||||
@ -344,9 +345,7 @@ The responses looks like:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// NOTCONSOLE
|
// TESTRESPONSE
|
||||||
// We can't test tasks output
|
|
||||||
|
|
||||||
<1> this object contains the actual status. It is just like the response JSON
|
<1> this object contains the actual status. It is just like the response JSON
|
||||||
with the important addition of the `total` field. `total` is the total number
|
with the important addition of the `total` field. `total` is the total number
|
||||||
of operations that the delete by query expects to perform. You can estimate the
|
of operations that the delete by query expects to perform. You can estimate the
|
||||||
@ -357,10 +356,9 @@ With the task id you can look up the task directly:
|
|||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
GET /_tasks/task_id
|
GET /_tasks/r1A2WoRbTwKZ516z6NEs5A:36619
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
// TEST[catch:missing]
|
// TEST[catch:missing]
|
||||||
|
|
||||||
The advantage of this API is that it integrates with `wait_for_completion=false`
|
The advantage of this API is that it integrates with `wait_for_completion=false`
|
||||||
@ -375,16 +373,15 @@ you to delete that document.
|
|||||||
[[docs-delete-by-query-cancel-task-api]]
|
[[docs-delete-by-query-cancel-task-api]]
|
||||||
=== Works with the Cancel Task API
|
=== Works with the Cancel Task API
|
||||||
|
|
||||||
Any Delete By Query can be canceled using the <<tasks,Task Cancel API>>:
|
Any Delete By Query can be canceled using the <<tasks,task cancel API>>:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
POST _tasks/task_id/_cancel
|
POST _tasks/r1A2WoRbTwKZ516z6NEs5A:36619/_cancel
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
|
||||||
The `task_id` can be found using the tasks API above.
|
The task ID can be found using the <<tasks,tasks API>>.
|
||||||
|
|
||||||
Cancellation should happen quickly but might take a few seconds. The task status
|
Cancellation should happen quickly but might take a few seconds. The task status
|
||||||
API above will continue to list the task until it wakes to cancel itself.
|
API above will continue to list the task until it wakes to cancel itself.
|
||||||
@ -399,12 +396,11 @@ using the `_rethrottle` API:
|
|||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
POST _delete_by_query/task_id/_rethrottle?requests_per_second=-1
|
POST _delete_by_query/r1A2WoRbTwKZ516z6NEs5A:36619/_rethrottle?requests_per_second=-1
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
|
||||||
The `task_id` can be found using the tasks API above.
|
The task ID can be found using the <<tasks,tasks API>>.
|
||||||
|
|
||||||
Just like when setting it on the `_delete_by_query` API, `requests_per_second`
|
Just like when setting it on the `_delete_by_query` API, `requests_per_second`
|
||||||
can be either `-1` to disable throttling or any decimal number
|
can be either `-1` to disable throttling or any decimal number
|
||||||
|
@ -692,6 +692,7 @@ You can fetch the status of all running reindex requests with the
|
|||||||
GET _tasks?detailed=true&actions=*reindex
|
GET _tasks?detailed=true&actions=*reindex
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
// TEST[skip:No tasks to retrieve]
|
||||||
|
|
||||||
The response looks like:
|
The response looks like:
|
||||||
|
|
||||||
@ -726,32 +727,36 @@ The response looks like:
|
|||||||
"bulk": 0,
|
"bulk": 0,
|
||||||
"search": 0
|
"search": 0
|
||||||
},
|
},
|
||||||
"throttled_millis": 0
|
"throttled_millis": 0,
|
||||||
|
"requests_per_second": -1,
|
||||||
|
"throttled_until_millis": 0
|
||||||
},
|
},
|
||||||
"description" : ""
|
"description" : "",
|
||||||
|
"start_time_in_millis": 1535149899665,
|
||||||
|
"running_time_in_nanos": 5926916792,
|
||||||
|
"cancellable": true,
|
||||||
|
"headers": {}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// NOTCONSOLE
|
// TESTRESPONSE
|
||||||
// We can't test tasks output
|
|
||||||
|
|
||||||
<1> this object contains the actual status. It is identical to the response JSON
|
<1> this object contains the actual status. It is identical to the response JSON
|
||||||
except for the important addition of the `total` field. `total` is the total number
|
except for the important addition of the `total` field. `total` is the total number
|
||||||
of operations that the `_reindex` expects to perform. You can estimate the
|
of operations that the `_reindex` expects to perform. You can estimate the
|
||||||
progress by adding the `updated`, `created`, and `deleted` fields. The request
|
progress by adding the `updated`, `created`, and `deleted` fields. The request
|
||||||
will finish when their sum is equal to the `total` field.
|
will finish when their sum is equal to the `total` field.
|
||||||
|
|
||||||
With the task id you can look up the task directly:
|
With the task id you can look up the task directly. The following example
|
||||||
|
retrieves information about the task `r1A2WoRbTwKZ516z6NEs5A:36619`:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
GET /_tasks/task_id
|
GET /_tasks/r1A2WoRbTwKZ516z6NEs5A:36619
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
// TEST[catch:missing]
|
// TEST[catch:missing]
|
||||||
|
|
||||||
The advantage of this API is that it integrates with `wait_for_completion=false`
|
The advantage of this API is that it integrates with `wait_for_completion=false`
|
||||||
@ -766,16 +771,16 @@ you to delete that document.
|
|||||||
[[docs-reindex-cancel-task-api]]
|
[[docs-reindex-cancel-task-api]]
|
||||||
=== Works with the Cancel Task API
|
=== Works with the Cancel Task API
|
||||||
|
|
||||||
Any Reindex can be canceled using the <<tasks,Task Cancel API>>:
|
Any Reindex can be canceled using the <<task-cancellation,Task Cancel API>>. For
|
||||||
|
example:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
POST _tasks/task_id/_cancel
|
POST _tasks/r1A2WoRbTwKZ516z6NEs5A:36619/_cancel
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
|
|
||||||
The `task_id` can be found using the Tasks API.
|
The task ID can be found using the <<tasks,Tasks API>>.
|
||||||
|
|
||||||
Cancelation should happen quickly but might take a few seconds. The Tasks
|
Cancelation should happen quickly but might take a few seconds. The Tasks
|
||||||
API will continue to list the task until it wakes to cancel itself.
|
API will continue to list the task until it wakes to cancel itself.
|
||||||
@ -790,12 +795,11 @@ the `_rethrottle` API:
|
|||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
POST _reindex/task_id/_rethrottle?requests_per_second=-1
|
POST _reindex/r1A2WoRbTwKZ516z6NEs5A:36619/_rethrottle?requests_per_second=-1
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
|
|
||||||
The `task_id` can be found using the Tasks API above.
|
The task ID can be found using the <<tasks,tasks API>>.
|
||||||
|
|
||||||
Just like when setting it on the Reindex API, `requests_per_second`
|
Just like when setting it on the Reindex API, `requests_per_second`
|
||||||
can be either `-1` to disable throttling or any decimal number
|
can be either `-1` to disable throttling or any decimal number
|
||||||
|
@ -359,6 +359,7 @@ You can fetch the status of all running update-by-query requests with the
|
|||||||
GET _tasks?detailed=true&actions=*byquery
|
GET _tasks?detailed=true&actions=*byquery
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
|
// TEST[skip:No tasks to retrieve]
|
||||||
|
|
||||||
The responses looks like:
|
The responses looks like:
|
||||||
|
|
||||||
@ -392,7 +393,7 @@ The responses looks like:
|
|||||||
"retries": {
|
"retries": {
|
||||||
"bulk": 0,
|
"bulk": 0,
|
||||||
"search": 0
|
"search": 0
|
||||||
}
|
},
|
||||||
"throttled_millis": 0
|
"throttled_millis": 0
|
||||||
},
|
},
|
||||||
"description" : ""
|
"description" : ""
|
||||||
@ -402,8 +403,7 @@ The responses looks like:
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// NOTCONSOLE
|
// TESTRESPONSE
|
||||||
// We can't test tasks output
|
|
||||||
|
|
||||||
<1> this object contains the actual status. It is just like the response JSON
|
<1> this object contains the actual status. It is just like the response JSON
|
||||||
with the important addition of the `total` field. `total` is the total number
|
with the important addition of the `total` field. `total` is the total number
|
||||||
@ -411,14 +411,14 @@ of operations that the reindex expects to perform. You can estimate the
|
|||||||
progress by adding the `updated`, `created`, and `deleted` fields. The request
|
progress by adding the `updated`, `created`, and `deleted` fields. The request
|
||||||
will finish when their sum is equal to the `total` field.
|
will finish when their sum is equal to the `total` field.
|
||||||
|
|
||||||
With the task id you can look up the task directly:
|
With the task id you can look up the task directly. The following example
|
||||||
|
retrieves information about task `r1A2WoRbTwKZ516z6NEs5A:36619`:
|
||||||
|
|
||||||
[source,js]
|
[source,js]
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
GET /_tasks/task_id
|
GET /_tasks/r1A2WoRbTwKZ516z6NEs5A:36619
|
||||||
--------------------------------------------------
|
--------------------------------------------------
|
||||||
// CONSOLE
|
// CONSOLE
|
||||||
// TEST[s/task_id/node_id:1/]
|
|
||||||
// TEST[catch:missing]
|
// TEST[catch:missing]
|
||||||
|
|
||||||
The advantage of this API is that it integrates with `wait_for_completion=false`
|
The advantage of this API is that it integrates with `wait_for_completion=false`
|
||||||
@@ -437,12 +437,11 @@ Any Update By Query can be canceled using the <<tasks,Task Cancel API>>:
 
 [source,js]
 --------------------------------------------------
-POST _tasks/task_id/_cancel
+POST _tasks/r1A2WoRbTwKZ516z6NEs5A:36619/_cancel
 --------------------------------------------------
 // CONSOLE
-// TEST[s/task_id/node_id:1/]
 
-The `task_id` can be found using the tasks API above.
+The task ID can be found using the <<tasks,tasks API>>.
 
 Cancellation should happen quickly but might take a few seconds. The task status
 API above will continue to list the task until it wakes to cancel itself.
@@ -457,12 +456,11 @@ using the `_rethrottle` API:
 
 [source,js]
 --------------------------------------------------
-POST _update_by_query/task_id/_rethrottle?requests_per_second=-1
+POST _update_by_query/r1A2WoRbTwKZ516z6NEs5A:36619/_rethrottle?requests_per_second=-1
 --------------------------------------------------
 // CONSOLE
-// TEST[s/task_id/node_id:1/]
 
-The `task_id` can be found using the tasks API above.
+The task ID can be found using the <<tasks, tasks API>>.
 
 Just like when setting it on the `_update_by_query` API, `requests_per_second`
 can be either `-1` to disable throttling or any decimal number
@@ -43,6 +43,12 @@ PUT /_cluster/settings
 -------------------------------
 // CONSOLE
 
+IMPORTANT: User-defined cluster metadata is not intended to store sensitive or
+confidential information. Any information stored in user-defined cluster
+metadata will be viewable by anyone with access to the
+<<cluster-get-settings,Cluster Get Settings>> API, and is recorded in the
+{es} logs.
+
 [[cluster-max-tombstones]]
 ==== Index Tombstones
 
@@ -22,6 +22,17 @@ it excludes (due to `-`) all indices that start with `l`.
 This notation is very convenient and powerful as it allows both inclusion and exclusion, depending on
 the target naming convention.
 
+The same kind of patterns can also be used to query multiple indices or tables.
+
+For example:
+
+["source","sql",subs="attributes,callouts,macros"]
+----
+include-tagged::{sql-specs}/docs.csv-spec[fromTablePatternQuoted]
+----
+
+NOTE: There is the restriction that all resolved concrete tables have the exact same mapping.
+
 * SQL `LIKE` notation
 
 The common `LIKE` statement (including escaping if needed) to match a wildcard pattern, based on one `_`
@@ -88,7 +88,7 @@ where:
 Represents the name (optionally qualified) of an existing table, either a concrete or base one (actual index) or alias.
 
 
-If the table name contains special SQL characters (such as `.`,`-`,etc...) use double quotes to escape them:
+If the table name contains special SQL characters (such as `.`,`-`,`*`,etc...) use double quotes to escape them:
 
 ["source","sql",subs="attributes,callouts,macros"]
 ----
@@ -33,7 +33,7 @@ import java.util.Collections;
 import java.util.Comparator;
 import java.util.List;
 import java.util.Objects;
-import java.util.Optional;
+import java.util.OptionalInt;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
@@ -119,8 +119,8 @@ public class DiscountedCumulativeGain implements EvaluationMetric {
 
 
     @Override
-    public Optional<Integer> forcedSearchSize() {
-        return Optional.of(k);
+    public OptionalInt forcedSearchSize() {
+        return OptionalInt.of(k);
     }
 
     @Override
@@ -130,9 +130,13 @@ public class DiscountedCumulativeGain implements EvaluationMetric {
         List<Integer> ratingsInSearchHits = new ArrayList<>(ratedHits.size());
         int unratedResults = 0;
         for (RatedSearchHit hit : ratedHits) {
-            // unknownDocRating might be null, in which case unrated docs will be ignored in the dcg calculation.
-            // we still need to add them as a placeholder so the rank of the subsequent ratings is correct
-            ratingsInSearchHits.add(hit.getRating().orElse(unknownDocRating));
+            if (hit.getRating().isPresent()) {
+                ratingsInSearchHits.add(hit.getRating().getAsInt());
+            } else {
+                // unknownDocRating might be null, in which case unrated docs will be ignored in the dcg calculation.
+                // we still need to add them as a placeholder so the rank of the subsequent ratings is correct
+                ratingsInSearchHits.add(unknownDocRating);
+            }
             if (hit.getRating().isPresent() == false) {
                 unratedResults++;
             }
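
The branch introduced above exists because `OptionalInt.orElse` only accepts a primitive `int`, so the old `orElse(unknownDocRating)` one-liner, which could pass a null `Integer`, no longer compiles. A self-contained sketch of the placeholder pattern, using plain JDK types only:

[source,java]
--------------------------------------------------
import java.util.ArrayList;
import java.util.List;
import java.util.OptionalInt;

public class RatingPlaceholderSketch {
    // Mirrors the branch above: rated hits contribute their rating, unrated
    // hits contribute the (possibly null) placeholder so ranks stay aligned.
    static List<Integer> toRatings(List<OptionalInt> hits, Integer unknownDocRating) {
        List<Integer> ratings = new ArrayList<>(hits.size());
        for (OptionalInt rating : hits) {
            if (rating.isPresent()) {
                ratings.add(rating.getAsInt());
            } else {
                ratings.add(unknownDocRating); // may be null; the metric skips nulls
            }
        }
        return ratings;
    }

    public static void main(String[] args) {
        List<OptionalInt> hits = List.of(OptionalInt.of(3), OptionalInt.empty(), OptionalInt.of(1));
        System.out.println(toRatings(hits, null)); // [3, null, 1]
    }
}
--------------------------------------------------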
@@ -29,7 +29,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
+import java.util.OptionalInt;
 import java.util.stream.Collectors;
 
 /**
@@ -64,9 +64,9 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
             DocumentKey key = new DocumentKey(hit.getIndex(), hit.getId());
             RatedDocument ratedDoc = ratedDocumentMap.get(key);
             if (ratedDoc != null) {
-                ratedSearchHits.add(new RatedSearchHit(hit, Optional.of(ratedDoc.getRating())));
+                ratedSearchHits.add(new RatedSearchHit(hit, OptionalInt.of(ratedDoc.getRating())));
             } else {
-                ratedSearchHits.add(new RatedSearchHit(hit, Optional.empty()));
+                ratedSearchHits.add(new RatedSearchHit(hit, OptionalInt.empty()));
             }
         }
         return ratedSearchHits;
@@ -93,7 +93,7 @@ public interface EvaluationMetric extends ToXContentObject, NamedWriteable {
      * this method. The default implementation returns an empty optional.
      * @return the number of search hits this metric requests
      */
-    default Optional<Integer> forcedSearchSize() {
-        return Optional.empty();
+    default OptionalInt forcedSearchSize() {
+        return OptionalInt.empty();
     }
 }
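
The change from `Optional<Integer>` to `OptionalInt` throughout these hunks swaps a boxed wrapper for the primitive specialization. A quick JDK-only comparison of the two APIs:

[source,java]
--------------------------------------------------
import java.util.Optional;
import java.util.OptionalInt;

public class OptionalIntComparison {
    public static void main(String[] args) {
        Optional<Integer> boxed = Optional.of(10);   // allocates an Integer
        OptionalInt primitive = OptionalInt.of(10);  // stores a plain int

        // The accessor names differ: get() vs getAsInt()
        int a = boxed.get();
        int b = primitive.getAsInt();
        System.out.println(a + b); // 20

        // Both expose isPresent(), and empty() mirrors Optional.empty()
        System.out.println(OptionalInt.empty().isPresent()); // false
    }
}
--------------------------------------------------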
@@ -32,7 +32,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
-import java.util.Optional;
+import java.util.OptionalInt;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
@@ -126,8 +126,8 @@ public class ExpectedReciprocalRank implements EvaluationMetric {
 
 
     @Override
-    public Optional<Integer> forcedSearchSize() {
-        return Optional.of(k);
+    public OptionalInt forcedSearchSize() {
+        return OptionalInt.of(k);
     }
 
     @Override
@@ -139,9 +139,13 @@ public class ExpectedReciprocalRank implements EvaluationMetric {
         List<Integer> ratingsInSearchHits = new ArrayList<>(ratedHits.size());
         int unratedResults = 0;
         for (RatedSearchHit hit : ratedHits) {
-            // unknownDocRating might be null, in which case unrated will be ignored in the calculation.
-            // we still need to add them as a placeholder so the rank of the subsequent ratings is correct
-            ratingsInSearchHits.add(hit.getRating().orElse(unknownDocRating));
+            if (hit.getRating().isPresent()) {
+                ratingsInSearchHits.add(hit.getRating().getAsInt());
+            } else {
+                // unknownDocRating might be null, in which case unrated docs will be ignored in the dcg calculation.
+                // we still need to add them as a placeholder so the rank of the subsequent ratings is correct
+                ratingsInSearchHits.add(unknownDocRating);
+            }
             if (hit.getRating().isPresent() == false) {
                 unratedResults++;
             }
@@ -30,7 +30,7 @@ import org.elasticsearch.search.SearchHit;
 import java.io.IOException;
 import java.util.List;
 import java.util.Objects;
-import java.util.Optional;
+import java.util.OptionalInt;
 
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
@@ -90,8 +90,8 @@ public class MeanReciprocalRank implements EvaluationMetric {
     }
 
     @Override
-    public Optional<Integer> forcedSearchSize() {
-        return Optional.of(k);
+    public OptionalInt forcedSearchSize() {
+        return OptionalInt.of(k);
     }
 
     @Override
@@ -115,9 +115,9 @@ public class MeanReciprocalRank implements EvaluationMetric {
         int firstRelevant = -1;
         int rank = 1;
         for (RatedSearchHit hit : ratedHits) {
-            Optional<Integer> rating = hit.getRating();
+            OptionalInt rating = hit.getRating();
             if (rating.isPresent()) {
-                if (rating.get() >= this.relevantRatingThreshhold) {
+                if (rating.getAsInt() >= this.relevantRatingThreshhold) {
                     firstRelevant = rank;
                     break;
                 }
@@ -30,7 +30,7 @@ import org.elasticsearch.search.SearchHit;
 import java.io.IOException;
 import java.util.List;
 import java.util.Objects;
-import java.util.Optional;
+import java.util.OptionalInt;
 
 import javax.naming.directory.SearchResult;
 
@@ -144,8 +144,8 @@ public class PrecisionAtK implements EvaluationMetric {
     }
 
     @Override
-    public Optional<Integer> forcedSearchSize() {
-        return Optional.of(k);
+    public OptionalInt forcedSearchSize() {
+        return OptionalInt.of(k);
     }
 
     public static PrecisionAtK fromXContent(XContentParser parser) {
@@ -164,9 +164,9 @@ public class PrecisionAtK implements EvaluationMetric {
         int falsePositives = 0;
         List<RatedSearchHit> ratedSearchHits = joinHitsWithRatings(hits, ratedDocs);
         for (RatedSearchHit hit : ratedSearchHits) {
-            Optional<Integer> rating = hit.getRating();
+            OptionalInt rating = hit.getRating();
             if (rating.isPresent()) {
-                if (rating.get() >= this.relevantRatingThreshhold) {
+                if (rating.getAsInt() >= this.relevantRatingThreshhold) {
                     truePositives++;
                 } else {
                     falsePositives++;
@@ -33,7 +33,7 @@ import org.elasticsearch.search.SearchHit;
 
 import java.io.IOException;
 import java.util.Objects;
-import java.util.Optional;
+import java.util.OptionalInt;
 
 /**
  * Combines a {@link SearchHit} with a document rating.
@@ -41,16 +41,16 @@ import java.util.Optional;
 public class RatedSearchHit implements Writeable, ToXContentObject {
 
     private final SearchHit searchHit;
-    private final Optional<Integer> rating;
+    private final OptionalInt rating;
 
-    public RatedSearchHit(SearchHit searchHit, Optional<Integer> rating) {
+    public RatedSearchHit(SearchHit searchHit, OptionalInt rating) {
         this.searchHit = searchHit;
         this.rating = rating;
     }
 
     RatedSearchHit(StreamInput in) throws IOException {
         this(SearchHit.readSearchHit(in),
-                in.readBoolean() == true ? Optional.of(in.readVInt()) : Optional.empty());
+                in.readBoolean() == true ? OptionalInt.of(in.readVInt()) : OptionalInt.empty());
     }
 
     @Override
@@ -58,7 +58,7 @@ public class RatedSearchHit implements Writeable, ToXContentObject {
         searchHit.writeTo(out);
         out.writeBoolean(rating.isPresent());
         if (rating.isPresent()) {
-            out.writeVInt(rating.get());
+            out.writeVInt(rating.getAsInt());
         }
     }
 
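
The `writeTo`/stream-constructor pair above encodes an absent rating as a single boolean presence flag followed, only when present, by the value. The same scheme can be shown with plain `java.io` streams; note this sketch uses fixed-width ints, whereas Elasticsearch's `StreamOutput`/`StreamInput` use variable-length encoding:

[source,java]
--------------------------------------------------
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.OptionalInt;

public class OptionalIntWireFormat {
    // Presence flag first, value only if present
    static void write(DataOutputStream out, OptionalInt rating) throws IOException {
        out.writeBoolean(rating.isPresent());
        if (rating.isPresent()) {
            out.writeInt(rating.getAsInt());
        }
    }

    static OptionalInt read(DataInputStream in) throws IOException {
        return in.readBoolean() ? OptionalInt.of(in.readInt()) : OptionalInt.empty();
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        write(new DataOutputStream(bytes), OptionalInt.of(4));
        OptionalInt roundTripped = read(
            new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(roundTripped); // OptionalInt[4]
    }
}
--------------------------------------------------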
@@ -66,7 +66,7 @@ public class RatedSearchHit implements Writeable, ToXContentObject {
         return this.searchHit;
     }
 
-    public Optional<Integer> getRating() {
+    public OptionalInt getRating() {
         return this.rating;
     }
 
@@ -75,22 +75,21 @@ public class RatedSearchHit implements Writeable, ToXContentObject {
             throws IOException {
         builder.startObject();
         builder.field("hit", (ToXContent) searchHit);
-        builder.field("rating", rating.orElse(null));
+        builder.field("rating", rating.isPresent() ? rating.getAsInt() : null);
         builder.endObject();
         return builder;
     }
 
     private static final ParseField HIT_FIELD = new ParseField("hit");
     private static final ParseField RATING_FIELD = new ParseField("rating");
-    @SuppressWarnings("unchecked")
     private static final ConstructingObjectParser<RatedSearchHit, Void> PARSER = new ConstructingObjectParser<>("rated_hit", true,
-            a -> new RatedSearchHit((SearchHit) a[0], (Optional<Integer>) a[1]));
+            a -> new RatedSearchHit((SearchHit) a[0], (OptionalInt) a[1]));
 
     static {
         PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> SearchHit.fromXContent(p), HIT_FIELD);
         PARSER.declareField(ConstructingObjectParser.constructorArg(),
-                (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? Optional.empty() : Optional.of(p.intValue()), RATING_FIELD,
-                ValueType.INT_OR_NULL);
+                (p) -> p.currentToken() == XContentParser.Token.VALUE_NULL ? OptionalInt.empty() : OptionalInt.of(p.intValue()),
+                RATING_FIELD, ValueType.INT_OR_NULL);
     }
 
     public static RatedSearchHit parse(XContentParser parser) throws IOException {
@@ -119,7 +119,7 @@ public class TransportRankEvalAction extends HandledTransportAction<RankEvalRequ
             }
 
             if (metric.forcedSearchSize().isPresent()) {
-                evaluationRequest.size(metric.forcedSearchSize().get());
+                evaluationRequest.size(metric.forcedSearchSize().getAsInt());
             }
 
             ratedRequestsInSearch.add(ratedRequest);
@@ -223,7 +223,8 @@ public class PrecisionAtKTests extends ESTestCase {
     }
 
     private static PrecisionAtK copy(PrecisionAtK original) {
-        return new PrecisionAtK(original.getRelevantRatingThreshold(), original.getIgnoreUnlabeled(), original.forcedSearchSize().get());
+        return new PrecisionAtK(original.getRelevantRatingThreshold(), original.getIgnoreUnlabeled(),
+            original.forcedSearchSize().getAsInt());
     }
 
     private static PrecisionAtK mutate(PrecisionAtK original) {
@@ -231,15 +232,15 @@ public class PrecisionAtKTests extends ESTestCase {
         switch (randomIntBetween(0, 2)) {
         case 0:
             pAtK = new PrecisionAtK(original.getRelevantRatingThreshold(), !original.getIgnoreUnlabeled(),
-                    original.forcedSearchSize().get());
+                    original.forcedSearchSize().getAsInt());
             break;
         case 1:
             pAtK = new PrecisionAtK(randomValueOtherThan(original.getRelevantRatingThreshold(), () -> randomIntBetween(0, 10)),
-                    original.getIgnoreUnlabeled(), original.forcedSearchSize().get());
+                    original.getIgnoreUnlabeled(), original.forcedSearchSize().getAsInt());
             break;
         case 2:
             pAtK = new PrecisionAtK(original.getRelevantRatingThreshold(),
-                    original.getIgnoreUnlabeled(), original.forcedSearchSize().get() + 1);
+                    original.getIgnoreUnlabeled(), original.forcedSearchSize().getAsInt() + 1);
             break;
         default:
             throw new IllegalStateException("The test should only allow three parameters mutated");
@@ -128,7 +128,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
                 if (id.equals("1") || id.equals("6")) {
                     assertFalse(hit.getRating().isPresent());
                 } else {
-                    assertEquals(RELEVANT_RATING_1, hit.getRating().get().intValue());
+                    assertEquals(RELEVANT_RATING_1, hit.getRating().getAsInt());
                 }
             }
         }
@@ -139,7 +139,7 @@ public class RankEvalRequestIT extends ESIntegTestCase {
             for (RatedSearchHit hit : hitsAndRatings) {
                 String id = hit.getSearchHit().getId();
                 if (id.equals("1")) {
-                    assertEquals(RELEVANT_RATING_1, hit.getRating().get().intValue());
+                    assertEquals(RELEVANT_RATING_1, hit.getRating().getAsInt());
                 } else {
                     assertFalse(hit.getRating().isPresent());
                 }
@@ -50,7 +50,7 @@ import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
+import java.util.OptionalInt;
 import java.util.function.Predicate;
 
 import static java.util.Collections.singleton;
@@ -182,6 +182,6 @@ public class RankEvalResponseTests extends ESTestCase {
         SearchHit hit = new SearchHit(docId, docId + "", new Text(""), Collections.emptyMap());
         hit.shard(new SearchShardTarget("testnode", new Index(index, "uuid"), 0, null));
         hit.score(1.0f);
-        return new RatedSearchHit(hit, rating != null ? Optional.of(rating) : Optional.empty());
+        return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty());
     }
 }
@@ -30,7 +30,7 @@ import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
 import java.util.Collections;
-import java.util.Optional;
+import java.util.OptionalInt;
 
 import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode;
 import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
@@ -38,8 +38,8 @@ import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
 public class RatedSearchHitTests extends ESTestCase {
 
     public static RatedSearchHit randomRatedSearchHit() {
-        Optional<Integer> rating = randomBoolean() ? Optional.empty()
-                : Optional.of(randomIntBetween(0, 5));
+        OptionalInt rating = randomBoolean() ? OptionalInt.empty()
+                : OptionalInt.of(randomIntBetween(0, 5));
         SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10),
                 new Text(randomAlphaOfLength(10)), Collections.emptyMap());
         RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating);
@@ -47,11 +47,11 @@ public class RatedSearchHitTests extends ESTestCase {
     }
 
     private static RatedSearchHit mutateTestItem(RatedSearchHit original) {
-        Optional<Integer> rating = original.getRating();
+        OptionalInt rating = original.getRating();
         SearchHit hit = original.getSearchHit();
         switch (randomIntBetween(0, 1)) {
         case 0:
-            rating = rating.isPresent() ? Optional.of(rating.get() + 1) : Optional.of(randomInt(5));
+            rating = rating.isPresent() ? OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5));
             break;
         case 1:
             hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10),
@@ -31,8 +31,10 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.plugins.Plugin;
 import org.elasticsearch.plugins.ScriptPlugin;
 import org.elasticsearch.script.ScoreScript;
+import org.elasticsearch.script.ScoreScript.LeafFactory;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptEngine;
+import org.elasticsearch.search.lookup.SearchLookup;
 
 /**
  * An example script plugin that adds a {@link ScriptEngine} implementing expert scoring.
@@ -53,81 +55,106 @@ public class ExpertScriptPlugin extends Plugin implements ScriptPlugin {
         }
 
         @Override
-        public <T> T compile(String scriptName, String scriptSource, ScriptContext<T> context, Map<String, String> params) {
+        public <T> T compile(String scriptName, String scriptSource,
+                ScriptContext<T> context, Map<String, String> params) {
             if (context.equals(ScoreScript.CONTEXT) == false) {
-                throw new IllegalArgumentException(getType() + " scripts cannot be used for context [" + context.name + "]");
+                throw new IllegalArgumentException(getType()
+                        + " scripts cannot be used for context ["
+                        + context.name + "]");
             }
             // we use the script "source" as the script identifier
             if ("pure_df".equals(scriptSource)) {
-                ScoreScript.Factory factory = (p, lookup) -> new ScoreScript.LeafFactory() {
-                    final String field;
-                    final String term;
-                    {
-                        if (p.containsKey("field") == false) {
-                            throw new IllegalArgumentException("Missing parameter [field]");
-                        }
-                        if (p.containsKey("term") == false) {
-                            throw new IllegalArgumentException("Missing parameter [term]");
-                        }
-                        field = p.get("field").toString();
-                        term = p.get("term").toString();
-                    }
-
-                    @Override
-                    public ScoreScript newInstance(LeafReaderContext context) throws IOException {
-                        PostingsEnum postings = context.reader().postings(new Term(field, term));
-                        if (postings == null) {
-                            // the field and/or term don't exist in this segment, so always return 0
-                            return new ScoreScript(p, lookup, context) {
-                                @Override
-                                public double execute() {
-                                    return 0.0d;
-                                }
-                            };
-                        }
-                        return new ScoreScript(p, lookup, context) {
-                            int currentDocid = -1;
-                            @Override
-                            public void setDocument(int docid) {
-                                // advance has undefined behavior calling with a docid <= its current docid
-                                if (postings.docID() < docid) {
-                                    try {
-                                        postings.advance(docid);
-                                    } catch (IOException e) {
-                                        throw new UncheckedIOException(e);
-                                    }
-                                }
-                                currentDocid = docid;
-                            }
-                            @Override
-                            public double execute() {
-                                if (postings.docID() != currentDocid) {
-                                    // advance moved past the current doc, so this doc has no occurrences of the term
-                                    return 0.0d;
-                                }
-                                try {
-                                    return postings.freq();
-                                } catch (IOException e) {
-                                    throw new UncheckedIOException(e);
-                                }
-                            }
-                        };
-                    }
-
-                    @Override
-                    public boolean needs_score() {
-                        return false;
-                    }
-                };
+                ScoreScript.Factory factory = PureDfLeafFactory::new;
                 return context.factoryClazz.cast(factory);
             }
-            throw new IllegalArgumentException("Unknown script name " + scriptSource);
+            throw new IllegalArgumentException("Unknown script name "
+                    + scriptSource);
         }
 
         @Override
         public void close() {
            // optionally close resources
        }
 
+        private static class PureDfLeafFactory implements LeafFactory {
+            private final Map<String, Object> params;
+            private final SearchLookup lookup;
+            private final String field;
+            private final String term;
+
+            private PureDfLeafFactory(
+                        Map<String, Object> params, SearchLookup lookup) {
+                if (params.containsKey("field") == false) {
+                    throw new IllegalArgumentException(
+                            "Missing parameter [field]");
+                }
+                if (params.containsKey("term") == false) {
+                    throw new IllegalArgumentException(
+                            "Missing parameter [term]");
+                }
+                this.params = params;
+                this.lookup = lookup;
+                field = params.get("field").toString();
+                term = params.get("term").toString();
+            }
+
+            @Override
+            public boolean needs_score() {
+                return false; // Return true if the script needs the score
+            }
+
+            @Override
+            public ScoreScript newInstance(LeafReaderContext context)
+                    throws IOException {
+                PostingsEnum postings = context.reader().postings(
+                        new Term(field, term));
+                if (postings == null) {
+                    /*
+                     * the field and/or term don't exist in this segment,
+                     * so always return 0
+                     */
+                    return new ScoreScript(params, lookup, context) {
+                        @Override
+                        public double execute() {
+                            return 0.0d;
+                        }
+                    };
+                }
+                return new ScoreScript(params, lookup, context) {
+                    int currentDocid = -1;
+                    @Override
+                    public void setDocument(int docid) {
+                        /*
+                         * advance has undefined behavior calling with
+                         * a docid <= its current docid
+                         */
+                        if (postings.docID() < docid) {
+                            try {
+                                postings.advance(docid);
+                            } catch (IOException e) {
+                                throw new UncheckedIOException(e);
+                            }
+                        }
+                        currentDocid = docid;
+                    }
+                    @Override
+                    public double execute() {
+                        if (postings.docID() != currentDocid) {
+                            /*
+                             * advance moved past the current doc, so this doc
+                             * has no occurrences of the term
+                             */
+                            return 0.0d;
+                        }
+                        try {
+                            return postings.freq();
+                        } catch (IOException e) {
+                            throw new UncheckedIOException(e);
+                        }
+                    }
+                };
+            }
+        }
     }
     // end::expert_engine
 }
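
The refactoring above works because `ScoreScript.Factory` is a functional interface, so the old `(p, lookup) -> new ScoreScript.LeafFactory() {...}` lambda collapses into the constructor reference `PureDfLeafFactory::new`. A generic, JDK-only sketch of the same pattern; all names here are stand-ins, not Elasticsearch types:

[source,java]
--------------------------------------------------
import java.util.Map;
import java.util.function.BiFunction;

public class ConstructorReferenceSketch {
    interface LeafFactoryLike {
        boolean needsScore();
    }

    static class PureDfLike implements LeafFactoryLike {
        PureDfLike(Map<String, Object> params, Object lookup) {
            // parameter validation would go here, as in the constructor above
        }
        @Override
        public boolean needsScore() {
            return false;
        }
    }

    public static void main(String[] args) {
        // A constructor reference satisfies any functional interface whose
        // single abstract method matches the constructor's signature.
        BiFunction<Map<String, Object>, Object, LeafFactoryLike> factory = PureDfLike::new;
        System.out.println(factory.apply(Map.of(), new Object()).needsScore()); // false
    }
}
--------------------------------------------------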
@@ -148,12 +148,12 @@ public class RecoveryIT extends AbstractRollingTestCase {
                 break;
             case UPGRADED:
                 updateIndexSettings(index, Settings.builder().put(INDEX_ROUTING_ALLOCATION_ENABLE_SETTING.getKey(), (String)null));
-                asyncIndexDocs(index, 60, 50).get();
+                asyncIndexDocs(index, 60, 45).get();
                 ensureGreen(index);
                 client().performRequest(new Request("POST", index + "/_refresh"));
-                assertCount(index, "_only_nodes:" + nodes.get(0), 110);
-                assertCount(index, "_only_nodes:" + nodes.get(1), 110);
-                assertCount(index, "_only_nodes:" + nodes.get(2), 110);
+                assertCount(index, "_only_nodes:" + nodes.get(0), 105);
+                assertCount(index, "_only_nodes:" + nodes.get(1), 105);
+                assertCount(index, "_only_nodes:" + nodes.get(2), 105);
                 break;
             default:
                 throw new IllegalStateException("unknown type " + CLUSTER_TYPE);
@@ -165,7 +165,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
         request.addParameter("preference", preference);
         final Response response = client().performRequest(request);
         final int actualCount = Integer.parseInt(ObjectPath.createFromResponse(response).evaluate("count").toString());
-        assertThat(actualCount, equalTo(expectedCount));
+        assertThat("preference [" + preference + "]", actualCount, equalTo(expectedCount));
     }
 
 
@@ -225,7 +225,7 @@ public class RecoveryIT extends AbstractRollingTestCase {
                     .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2)
                     .put("index.routing.allocation.include._id", (String)null)
                 );
-                asyncIndexDocs(index, 60, 50).get();
+                asyncIndexDocs(index, 60, 45).get();
                 ensureGreen(index);
                 client().performRequest(new Request("POST", index + "/_refresh"));
                 Response response = client().performRequest(new Request("GET", "_nodes"));
@@ -233,9 +233,9 @@ public class RecoveryIT extends AbstractRollingTestCase {
                 final Map<String, Object> nodeMap = objectPath.evaluate("nodes");
                 List<String> nodes = new ArrayList<>(nodeMap.keySet());
 
-                assertCount(index, "_only_nodes:" + nodes.get(0), 110);
-                assertCount(index, "_only_nodes:" + nodes.get(1), 110);
-                assertCount(index, "_only_nodes:" + nodes.get(2), 110);
+                assertCount(index, "_only_nodes:" + nodes.get(0), 105);
+                assertCount(index, "_only_nodes:" + nodes.get(1), 105);
+                assertCount(index, "_only_nodes:" + nodes.get(2), 105);
                 break;
             default:
                 throw new IllegalStateException("unknown type " + CLUSTER_TYPE);
@@ -0,0 +1,299 @@
+
+---
+"Search by suggestion and by keyword sub-field should work":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: "Search by suggestion with multi-fields was introduced 7.0.0"
+
+  - do:
+      indices.create:
+        index: completion_with_sub_keyword
+        body:
+          mappings:
+            test:
+              "properties":
+                "suggest_1":
+                  "type" : "completion"
+                  "fields":
+                    "text_raw":
+                      "type" : "keyword"
+
+  - do:
+      index:
+        index: completion_with_sub_keyword
+        type: test
+        id: 1
+        body:
+          suggest_1: "bar"
+
+  - do:
+      index:
+        index: completion_with_sub_keyword
+        type: test
+        id: 2
+        body:
+          suggest_1: "baz"
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      search:
+        index: completion_with_sub_keyword
+        body:
+          suggest:
+            result:
+              text: "b"
+              completion:
+                field: suggest_1
+
+  - length: { suggest.result: 1 }
+  - length: { suggest.result.0.options: 2 }
+
+
+  - do:
+      search:
+        index: completion_with_sub_keyword
+        body:
+          query: { term: { suggest_1.text_raw: "bar" }}
+
+  - match: { hits.total: 1 }
+
+
+
+---
+"Search by suggestion on sub field should work":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: "Search by suggestion with multi-fields was introduced 7.0.0"
+
+  - do:
+      indices.create:
+        index: completion_with_sub_completion
+        body:
+          mappings:
+            test:
+              "properties":
+                "suggest_1":
+                  "type": "completion"
+                  "fields":
+                    "suggest_2":
+                      "type": "completion"
+
+  - do:
+      index:
+        index: completion_with_sub_completion
+        type: test
+        id: 1
+        body:
+          suggest_1: "bar"
+
+  - do:
+      index:
+        index: completion_with_sub_completion
+        type: test
+        id: 2
+        body:
+          suggest_1: "baz"
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      search:
+        index: completion_with_sub_completion
+        body:
+          suggest:
+            result:
+              text: "b"
+              completion:
+                field: suggest_1.suggest_2
+
+  - length: { suggest.result: 1 }
+  - length: { suggest.result.0.options: 2 }
+
+---
+"Search by suggestion on sub field with context should work":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: "Search by suggestion with multi-fields was introduced 7.0.0"
+
+  - do:
+      indices.create:
+        index: completion_with_context
+        body:
+          mappings:
+            test:
+              "properties":
+                "suggest_1":
+                  "type": "completion"
+                  "contexts":
+                    -
+                      "name": "color"
+                      "type": "category"
+                  "fields":
+                    "suggest_2":
+                      "type": "completion"
+                      "contexts":
+                        -
+                          "name": "color"
+                          "type": "category"
+
+
+  - do:
+      index:
+        index: completion_with_context
+        type: test
+        id: 1
+        body:
+          suggest_1:
+            input: "foo red"
+            contexts:
+              color: "red"
+
+  - do:
+      index:
+        index: completion_with_context
+        type: test
+        id: 2
+        body:
+          suggest_1:
+            input: "foo blue"
+            contexts:
+              color: "blue"
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      search:
+        index: completion_with_context
+        body:
+          suggest:
+            result:
+              prefix: "foo"
+              completion:
+                field: suggest_1.suggest_2
+                contexts:
+                  color: "red"
+
+  - length: { suggest.result: 1 }
+  - length: { suggest.result.0.options: 1 }
+  - match: { suggest.result.0.options.0.text: "foo red" }
+
+
+---
+"Search by suggestion on sub field with weight should work":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: "Search by suggestion with multi-fields was introduced 7.0.0"
+
+  - do:
+      indices.create:
+        index: completion_with_weight
+        body:
+          mappings:
+            test:
+              "properties":
+                "suggest_1":
+                  "type": "completion"
+                  "fields":
+                    "suggest_2":
+                      "type": "completion"
+
+  - do:
+      index:
+        index: completion_with_weight
+        type: test
+        id: 1
+        body:
+          suggest_1:
+            input: "bar"
+            weight: 2
+
+  - do:
+      index:
+        index: completion_with_weight
+        type: test
+        id: 2
+        body:
+          suggest_1:
+            input: "baz"
+            weight: 3
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      search:
+        index: completion_with_weight
+        body:
+          suggest:
+            result:
+              text: "b"
+              completion:
+                field: suggest_1.suggest_2
+
+  - length: { suggest.result: 1 }
+  - length: { suggest.result.0.options: 2 }
+  - match: { suggest.result.0.options.0.text: "baz" }
+  - match: { suggest.result.0.options.1.text: "bar" }
+
+---
+"Search by suggestion on geofield-hash on sub field should work":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: "Search by suggestion with multi-fields was introduced 7.0.0"
+
+  - do:
+      indices.create:
+        index: geofield_with_completion
+        body:
+          mappings:
+            test:
+              "properties":
+                "geofield":
+                  "type": "geo_point"
+                  "fields":
+                    "suggest_1":
+                      "type": "completion"
+
+  - do:
+      index:
+        index: geofield_with_completion
+        type: test
+        id: 1
+        body:
+          geofield: "hgjhrwysvqw7"
+          #41.12,-72.34,12
+
+  - do:
+      index:
+        index: geofield_with_completion
+        type: test
+        id: 1
+        body:
+          geofield: "hgm4psywmkn7"
+          #41.12,-71.34,12
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      search:
+        index: geofield_with_completion
+        body:
+          suggest:
+            result:
+              prefix: "hgm"
+              completion:
+                field: geofield.suggest_1
+
+
+  - length: { suggest.result: 1 }
+  - length: { suggest.result.0.options: 1 }
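
For reference, the `suggest_1.suggest_2` path exercised by the YAML tests above maps onto the Java suggest builders in the expected way. A sketch, assuming the index name from the second test; the builder names are from the 6.x search API, so treat this as illustrative rather than authoritative:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.suggest.SuggestBuilder;
import org.elasticsearch.search.suggest.SuggestBuilders;

public class SubFieldSuggestSketch {
    public static SearchRequest buildRequest() {
        // The completion sub-field is addressed with the same dotted path
        // the YAML tests use: <field>.<sub-field>
        SuggestBuilder suggest = new SuggestBuilder().addSuggestion("result",
            SuggestBuilders.completionSuggestion("suggest_1.suggest_2").prefix("b"));
        SearchRequest request = new SearchRequest("completion_with_sub_completion");
        request.source(new SearchSourceBuilder().suggest(suggest));
        return request;
    }
}
--------------------------------------------------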
@@ -103,6 +103,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_6_4_1 = new Version(V_6_4_1_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
     public static final int V_6_4_2_ID = 6040299;
     public static final Version V_6_4_2 = new Version(V_6_4_2_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
+    public static final int V_6_4_3_ID = 6040399;
+    public static final Version V_6_4_3 = new Version(V_6_4_3_ID, org.apache.lucene.util.Version.LUCENE_7_4_0);
     public static final int V_6_5_0_ID = 6050099;
     public static final Version V_6_5_0 = new Version(V_6_5_0_ID, org.apache.lucene.util.Version.LUCENE_7_5_0);
     public static final int V_7_0_0_alpha1_ID = 7000001;
@@ -125,6 +127,8 @@ public class Version implements Comparable<Version>, ToXContentFragment {
                 return V_7_0_0_alpha1;
             case V_6_5_0_ID:
                 return V_6_5_0;
+            case V_6_4_3_ID:
+                return V_6_4_3;
             case V_6_4_2_ID:
                 return V_6_4_2;
             case V_6_4_1_ID:
@@ -38,7 +38,7 @@ import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
-import org.elasticsearch.discovery.Discovery;
+import org.elasticsearch.discovery.Discovery.FailedToCommitClusterStateException;
 import org.elasticsearch.discovery.MasterNotDiscoveredException;
 import org.elasticsearch.node.NodeClosedException;
 import org.elasticsearch.tasks.Task;
@@ -53,13 +53,15 @@ import java.util.function.Supplier;
 /**
  * A base class for operations that need to be performed on the master node.
  */
-public abstract class TransportMasterNodeAction<Request extends MasterNodeRequest<Request>, Response extends ActionResponse> extends HandledTransportAction<Request, Response> {
+public abstract class TransportMasterNodeAction<Request extends MasterNodeRequest<Request>, Response extends ActionResponse>
+    extends HandledTransportAction<Request, Response> {
 
     protected final ThreadPool threadPool;
     protected final TransportService transportService;
     protected final ClusterService clusterService;
     protected final IndexNameExpressionResolver indexNameExpressionResolver;
 
-    final String executor;
+    private final String executor;
 
     protected TransportMasterNodeAction(Settings settings, String actionName, TransportService transportService,
                                         ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters,
@@ -75,7 +77,8 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
 
     protected TransportMasterNodeAction(Settings settings, String actionName, boolean canTripCircuitBreaker,
                                         TransportService transportService, ClusterService clusterService, ThreadPool threadPool,
-                                        ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier<Request> request) {
+                                        ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver,
+                                        Supplier<Request> request) {
         super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request);
         this.transportService = transportService;
         this.clusterService = clusterService;
@@ -138,7 +141,8 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
 
         public void start() {
             ClusterState state = clusterService.state();
-            this.observer = new ClusterStateObserver(state, clusterService, request.masterNodeTimeout(), logger, threadPool.getThreadContext());
+            this.observer
+                = new ClusterStateObserver(state, clusterService, request.masterNodeTimeout(), logger, threadPool.getThreadContext());
             doStart(state);
         }
 
@@ -174,16 +178,16 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
 
                     @Override
                     public void onFailure(Exception t) {
-                        if (t instanceof Discovery.FailedToCommitClusterStateException
-                                || (t instanceof NotMasterException)) {
-                            logger.debug(() -> new ParameterizedMessage("master could not publish cluster state or stepped down before publishing action [{}], scheduling a retry", actionName), t);
+                        if (t instanceof FailedToCommitClusterStateException || t instanceof NotMasterException) {
+                            logger.debug(() -> new ParameterizedMessage("master could not publish cluster state or " +
+                                "stepped down before publishing action [{}], scheduling a retry", actionName), t);
                             retry(t, masterChangePredicate);
                         } else {
                             listener.onFailure(t);
                         }
                     }
                 };
-                threadPool.executor(executor).execute(new ActionRunnable(delegate) {
+                threadPool.executor(executor).execute(new ActionRunnable<Response>(delegate) {
                     @Override
                     protected void doRun() throws Exception {
                         masterOperation(task, request, clusterState, delegate);
@@ -204,7 +208,8 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
                         Throwable cause = exp.unwrapCause();
                         if (cause instanceof ConnectTransportException) {
                             // we want to retry here a bit to see if a new master is elected
-                            logger.debug("connection exception while trying to forward request with action name [{}] to master node [{}], scheduling a retry. Error: [{}]",
+                            logger.debug("connection exception while trying to forward request with action name [{}] to " +
+                                    "master node [{}], scheduling a retry. Error: [{}]",
                                 actionName, nodes.getMasterNode(), exp.getDetailedMessage());
                             retry(cause, masterChangePredicate);
                         } else {
@@ -234,7 +239,8 @@ public abstract class TransportMasterNodeAction<Request extends MasterNodeReques
 
             @Override
             public void onTimeout(TimeValue timeout) {
-                logger.debug(() -> new ParameterizedMessage("timed out while retrying [{}] after failure (timeout [{}])", actionName, timeout), failure);
+                logger.debug(() -> new ParameterizedMessage("timed out while retrying [{}] after failure (timeout [{}])",
+                    actionName, timeout), failure);
                 listener.onFailure(new MasterNotDiscoveredException(failure));
             }
         }, statePredicate
@@ -58,7 +58,7 @@ public interface ClusterStateTaskExecutor<T> {
      * This allows grouped task descriptions apart from the submitting source.
      */
     default String describeTasks(List<T> tasks) {
-        return String.join(", ", tasks.stream().map(t -> (CharSequence)t.toString()).filter(t -> t.length() == 0)::iterator);
+        return String.join(", ", tasks.stream().map(t -> (CharSequence)t.toString()).filter(t -> t.length() > 0)::iterator);
     }
 
     /**
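
The one-character fix above inverts a predicate that previously kept only the empty descriptions. A JDK-only demonstration of both predicates; the task names here are made up:

[source,java]
--------------------------------------------------
import java.util.List;

public class DescribeTasksFilterDemo {
    public static void main(String[] args) {
        List<String> descriptions = List.of("put-mapping", "", "create-index");

        // The old predicate kept only empty strings, so the joined description was blank:
        String broken = String.join(", ",
            descriptions.stream().map(t -> (CharSequence) t).filter(t -> t.length() == 0)::iterator);

        // The fixed predicate keeps the non-empty descriptions:
        String fixed = String.join(", ",
            descriptions.stream().map(t -> (CharSequence) t).filter(t -> t.length() > 0)::iterator);

        System.out.println("[" + broken + "]"); // []
        System.out.println("[" + fixed + "]");  // [put-mapping, create-index]
    }
}
--------------------------------------------------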
@@ -27,7 +27,7 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Optional;
+import java.util.OptionalInt;

 /**
 * This class acts as a functional wrapper around the {@code index.auto_expand_replicas} setting.
@@ -93,7 +93,7 @@ public final class AutoExpandReplicas {
 return Math.min(maxReplicas, numDataNodes-1);
 }

-Optional<Integer> getDesiredNumberOfReplicas(int numDataNodes) {
+private OptionalInt getDesiredNumberOfReplicas(int numDataNodes) {
 if (enabled) {
 final int min = getMinReplicas();
 final int max = getMaxReplicas(numDataNodes);
@@ -105,10 +105,10 @@
 }

 if (numberOfReplicas >= min && numberOfReplicas <= max) {
-return Optional.of(numberOfReplicas);
+return OptionalInt.of(numberOfReplicas);
 }
 }
-return Optional.empty();
+return OptionalInt.empty();
 }

 @Override
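The switch from Optional<Integer> to OptionalInt keeps the replica count as a primitive int instead of a boxed Integer. A minimal sketch of the same shape, with the replica arithmetic collapsed to one line (AutoExpandDemo and its parameters are illustrative, not the real class):

    import java.util.OptionalInt;

    public class AutoExpandDemo {
        // Simplified stand-in for getDesiredNumberOfReplicas: same
        // OptionalInt shape as the diff above.
        static OptionalInt desiredReplicas(boolean enabled, int numDataNodes, int min, int maxReplicas) {
            if (enabled) {
                int numberOfReplicas = Math.min(maxReplicas, numDataNodes - 1);
                if (numberOfReplicas >= min && numberOfReplicas <= maxReplicas) {
                    return OptionalInt.of(numberOfReplicas);
                }
            }
            return OptionalInt.empty();
        }

        public static void main(String[] args) {
            // 0-5 replicas on a 4-node cluster: expand to 3, one copy per other node
            desiredReplicas(true, 4, 0, 5).ifPresent(n -> System.out.println("expand to " + n));
            // disabled: no value present, and no boxing anywhere
            System.out.println(desiredReplicas(false, 4, 0, 5).isPresent()); // false
        }
    }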
@@ -24,6 +24,7 @@ import java.time.format.DateTimeParseException;
 import java.time.temporal.TemporalAccessor;
 import java.time.temporal.TemporalField;
 import java.util.Arrays;
+import java.util.Locale;
 import java.util.Map;
 import java.util.stream.Collectors;

@@ -46,6 +47,14 @@ public interface DateFormatter {
 */
 DateFormatter withZone(ZoneId zoneId);

+/**
+* Create a copy of this formatter that is configured to parse dates in the specified locale
+*
+* @param locale The local to use for the new formatter
+* @return A copy of the date formatter this has been called on
+*/
+DateFormatter withLocale(Locale locale);
+
 /**
 * Print the supplied java time accessor in a string based representation according to this formatter
 *
@@ -62,6 +71,20 @@ public interface DateFormatter {
 */
 String pattern();

+/**
+* Returns the configured locale of the date formatter
+*
+* @return The locale of this formatter
+*/
+Locale getLocale();
+
+/**
+* Returns the configured time zone of the date formatter
+*
+* @return The time zone of this formatter
+*/
+ZoneId getZone();
+
 /**
 * Configure a formatter using default fields for a TemporalAccessor that should be used in case
 * the supplied date is not having all of those fields
@@ -115,6 +138,11 @@ public interface DateFormatter {
 return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.withZone(zoneId)).toArray(DateFormatter[]::new));
 }

+@Override
+public DateFormatter withLocale(Locale locale) {
+return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.withLocale(locale)).toArray(DateFormatter[]::new));
+}
+
 @Override
 public String format(TemporalAccessor accessor) {
 return formatters[0].format(accessor);
@@ -125,6 +153,16 @@ public interface DateFormatter {
 return format;
 }

+@Override
+public Locale getLocale() {
+return formatters[0].getLocale();
+}
+
+@Override
+public ZoneId getZone() {
+return formatters[0].getZone();
+}
+
 @Override
 public DateFormatter parseDefaulting(Map<TemporalField, Long> fields) {
 return new MergedDateFormatter(Arrays.stream(formatters).map(f -> f.parseDefaulting(fields)).toArray(DateFormatter[]::new));
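The new withLocale/getLocale/getZone methods follow the immutable copy-on-configure style of java.time's DateTimeFormatter, which backs these formatters. A short demonstration of that style using only the JDK (WithLocaleDemo is a hypothetical example class):

    import java.time.ZoneId;
    import java.time.ZonedDateTime;
    import java.time.format.DateTimeFormatter;
    import java.util.Locale;

    public class WithLocaleDemo {
        public static void main(String[] args) {
            // each "with" call returns a reconfigured copy, the original stays untouched
            DateTimeFormatter base = DateTimeFormatter.ofPattern("EEEE dd MMMM yyyy", Locale.ROOT);
            DateTimeFormatter german = base.withLocale(Locale.GERMAN).withZone(ZoneId.of("Europe/Berlin"));

            System.out.println(german.getLocale()); // de
            System.out.println(german.getZone());   // Europe/Berlin
            System.out.println(base.getLocale());   // still the ROOT locale
            System.out.println(german.format(ZonedDateTime.of(2018, 8, 1, 12, 0, 0, 0, ZoneId.of("Europe/Berlin"))));
            // Mittwoch 01 August 2018
        }
    }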
@@ -1269,7 +1269,7 @@ public class DateFormatters {
 return forPattern(input, Locale.ROOT);
 }

-public static DateFormatter forPattern(String input, Locale locale) {
+private static DateFormatter forPattern(String input, Locale locale) {
 if (Strings.hasLength(input)) {
 input = input.trim();
 }
@@ -25,6 +25,7 @@ import java.time.ZoneOffset;
 import java.time.format.DateTimeParseException;
 import java.time.temporal.TemporalAccessor;
 import java.time.temporal.TemporalField;
+import java.util.Locale;
 import java.util.Map;

 /**
@@ -40,7 +41,8 @@ class EpochMillisDateFormatter implements DateFormatter {

 public static DateFormatter INSTANCE = new EpochMillisDateFormatter();

-private EpochMillisDateFormatter() {}
+private EpochMillisDateFormatter() {
+}

 @Override
 public TemporalAccessor parse(String input) {
@@ -53,6 +55,17 @@ class EpochMillisDateFormatter implements DateFormatter {

 @Override
 public DateFormatter withZone(ZoneId zoneId) {
+if (ZoneOffset.UTC.equals(zoneId) == false) {
+throw new IllegalArgumentException(pattern() + " date formatter can only be in zone offset UTC");
+}
+return INSTANCE;
+}
+
+@Override
+public DateFormatter withLocale(Locale locale) {
+if (Locale.ROOT.equals(locale) == false) {
+throw new IllegalArgumentException(pattern() + " date formatter can only be in locale ROOT");
+}
 return this;
 }

@@ -70,4 +83,14 @@ class EpochMillisDateFormatter implements DateFormatter {
 public DateFormatter parseDefaulting(Map<TemporalField, Long> fields) {
 return this;
 }
+
+@Override
+public Locale getLocale() {
+return Locale.ROOT;
+}
+
+@Override
+public ZoneId getZone() {
+return ZoneOffset.UTC;
+}
 }
@@ -26,6 +26,7 @@ import java.time.ZoneOffset;
 import java.time.format.DateTimeParseException;
 import java.time.temporal.TemporalAccessor;
 import java.time.temporal.TemporalField;
+import java.util.Locale;
 import java.util.Map;
 import java.util.regex.Pattern;

@@ -59,11 +60,6 @@ public class EpochSecondsDateFormatter implements DateFormatter {
 }
 }

-@Override
-public DateFormatter withZone(ZoneId zoneId) {
-return this;
-}
-
 @Override
 public String format(TemporalAccessor accessor) {
 Instant instant = Instant.from(accessor);
@@ -75,7 +71,33 @@ public class EpochSecondsDateFormatter implements DateFormatter {

 @Override
 public String pattern() {
-return "epoch_seconds";
+return "epoch_second";
+}
+
+@Override
+public Locale getLocale() {
+return Locale.ROOT;
+}
+
+@Override
+public ZoneId getZone() {
+return ZoneOffset.UTC;
+}
+
+@Override
+public DateFormatter withZone(ZoneId zoneId) {
+if (zoneId.equals(ZoneOffset.UTC) == false) {
+throw new IllegalArgumentException(pattern() + " date formatter can only be in zone offset UTC");
+}
+return this;
+}
+
+@Override
+public DateFormatter withLocale(Locale locale) {
+if (Locale.ROOT.equals(locale) == false) {
+throw new IllegalArgumentException(pattern() + " date formatter can only be in locale ROOT");
+}
+return this;
 }

 @Override
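Both epoch formatters now reject any non-default zone or locale up front, since an epoch timestamp is zone- and locale-independent. A minimal sketch of the shared guard (EpochGuardDemo is illustrative; the message wording comes from the diff above):

    import java.time.ZoneId;
    import java.time.ZoneOffset;
    import java.util.Locale;

    public class EpochGuardDemo {
        // only UTC / ROOT are meaningful for epoch values, anything else fails fast
        static void checkZone(ZoneId zoneId) {
            if (ZoneOffset.UTC.equals(zoneId) == false) {
                throw new IllegalArgumentException("epoch_millis date formatter can only be in zone offset UTC");
            }
        }

        static void checkLocale(Locale locale) {
            if (Locale.ROOT.equals(locale) == false) {
                throw new IllegalArgumentException("epoch_millis date formatter can only be in locale ROOT");
            }
        }

        public static void main(String[] args) {
            checkZone(ZoneOffset.UTC);   // fine
            checkLocale(Locale.ROOT);    // fine
            try {
                checkZone(ZoneId.of("Europe/Berlin"));
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage());
            }
        }
    }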
@@ -28,6 +28,7 @@ import java.time.temporal.TemporalField;
 import java.util.Arrays;
 import java.util.Locale;
 import java.util.Map;
+import java.util.Objects;

 class JavaDateFormatter implements DateFormatter {

@@ -36,10 +37,17 @@ class JavaDateFormatter implements DateFormatter {
 private final DateTimeFormatter[] parsers;

 JavaDateFormatter(String format, DateTimeFormatter printer, DateTimeFormatter... parsers) {
+if (printer == null) {
+throw new IllegalArgumentException("printer may not be null");
+}
 long distinctZones = Arrays.stream(parsers).map(DateTimeFormatter::getZone).distinct().count();
 if (distinctZones > 1) {
 throw new IllegalArgumentException("formatters must have the same time zone");
 }
+long distinctLocales = Arrays.stream(parsers).map(DateTimeFormatter::getLocale).distinct().count();
+if (distinctLocales > 1) {
+throw new IllegalArgumentException("formatters must have the same locale");
+}
 if (parsers.length == 0) {
 this.parsers = new DateTimeFormatter[]{printer};
 } else {
@@ -83,6 +91,21 @@ class JavaDateFormatter implements DateFormatter {
 return new JavaDateFormatter(format, printer.withZone(zoneId), parsersWithZone);
 }

+@Override
+public DateFormatter withLocale(Locale locale) {
+// shortcurt to not create new objects unnecessarily
+if (locale.equals(parsers[0].getLocale())) {
+return this;
+}
+
+final DateTimeFormatter[] parsersWithZone = new DateTimeFormatter[parsers.length];
+for (int i = 0; i < parsers.length; i++) {
+parsersWithZone[i] = parsers[i].withLocale(locale);
+}
+
+return new JavaDateFormatter(format, printer.withLocale(locale), parsersWithZone);
+}
+
 @Override
 public String format(TemporalAccessor accessor) {
 return printer.format(accessor);
@@ -109,4 +132,36 @@ class JavaDateFormatter implements DateFormatter {
 return new JavaDateFormatter(format, parseDefaultingBuilder.toFormatter(Locale.ROOT), parsersWithDefaulting);
 }
 }
+
+@Override
+public Locale getLocale() {
+return this.printer.getLocale();
+}
+
+@Override
+public ZoneId getZone() {
+return this.printer.getZone();
+}
+
+@Override
+public int hashCode() {
+return Objects.hash(getLocale(), printer.getZone(), format);
+}
+
+@Override
+public boolean equals(Object obj) {
+if (obj.getClass().equals(this.getClass()) == false) {
+return false;
+}
+JavaDateFormatter other = (JavaDateFormatter) obj;
+
+return Objects.equals(format, other.format) &&
+Objects.equals(getLocale(), other.getLocale()) &&
+Objects.equals(this.printer.getZone(), other.printer.getZone());
+}
+
+@Override
+public String toString() {
+return String.format(Locale.ROOT, "format[%s] locale[%s]", format, getLocale());
+}
 }
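The new equals/hashCode make a JavaDateFormatter's identity the (pattern, locale, zone) triple, with hashCode built from the same fields so equal formatters hash alike; the testEqualsAndHashcode assertions further down rely on exactly this. A standalone reduction of the contract (the Key class is a hypothetical stand-in; note the version in the diff dereferences obj before any null check, so equals(null) would throw rather than return false):

    import java.time.ZoneId;
    import java.util.Locale;
    import java.util.Objects;

    public class FormatterEqualityDemo {
        static final class Key {
            final String format;
            final Locale locale;
            final ZoneId zone;

            Key(String format, Locale locale, ZoneId zone) {
                this.format = format;
                this.locale = locale;
                this.zone = zone;
            }

            @Override
            public boolean equals(Object obj) {
                if (obj == null || obj.getClass().equals(this.getClass()) == false) {
                    return false; // null check added here, unlike the diff
                }
                Key other = (Key) obj;
                return Objects.equals(format, other.format)
                    && Objects.equals(locale, other.locale)
                    && Objects.equals(zone, other.zone);
            }

            @Override
            public int hashCode() {
                // same fields as equals, so equal keys hash alike
                return Objects.hash(locale, zone, format);
            }
        }

        public static void main(String[] args) {
            Key a = new Key("YYYY", Locale.ROOT, null);
            Key b = new Key("YYYY", Locale.ROOT, null);
            System.out.println(a.equals(b) && a.hashCode() == b.hashCode()); // true
            System.out.println(a.equals(new Key("YYYY", Locale.GERMAN, null))); // false
        }
    }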
@@ -436,8 +436,9 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
 Token token = parser.currentToken();
 Map<String, CompletionInputMetaData> inputMap = new HashMap<>(1);

-// ignore null values
-if (token == Token.VALUE_NULL) {
+if (context.externalValueSet()) {
+inputMap = getInputMapFromExternalValue(context);
+} else if (token == Token.VALUE_NULL) { // ignore null values
 return;
 } else if (token == Token.START_ARRAY) {
 while ((token = parser.nextToken()) != Token.END_ARRAY) {
@@ -471,12 +472,33 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
 context.doc().add(new SuggestField(fieldType().name(), input, metaData.weight));
 }
 }

 List<IndexableField> fields = new ArrayList<>(1);
 createFieldNamesField(context, fields);
 for (IndexableField field : fields) {
 context.doc().add(field);
 }
-multiFields.parse(this, context);
+
+for (CompletionInputMetaData metaData: inputMap.values()) {
+ParseContext externalValueContext = context.createExternalValueContext(metaData);
+multiFields.parse(this, externalValueContext);
+}
+}
+
+private Map<String, CompletionInputMetaData> getInputMapFromExternalValue(ParseContext context) {
+Map<String, CompletionInputMetaData> inputMap;
+if (isExternalValueOfClass(context, CompletionInputMetaData.class)) {
+CompletionInputMetaData inputAndMeta = (CompletionInputMetaData) context.externalValue();
+inputMap = Collections.singletonMap(inputAndMeta.input, inputAndMeta);
+} else {
+String fieldName = context.externalValue().toString();
+inputMap = Collections.singletonMap(fieldName, new CompletionInputMetaData(fieldName, Collections.emptyMap(), 1));
+}
+return inputMap;
+}
+
+private boolean isExternalValueOfClass(ParseContext context, Class<?> clazz) {
+return context.externalValue().getClass().equals(clazz);
 }

 /**
@@ -487,7 +509,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
 private void parse(ParseContext parseContext, Token token, XContentParser parser, Map<String, CompletionInputMetaData> inputMap) throws IOException {
 String currentFieldName = null;
 if (token == Token.VALUE_STRING) {
-inputMap.put(parser.text(), new CompletionInputMetaData(Collections.<String, Set<CharSequence>>emptyMap(), 1));
+inputMap.put(parser.text(), new CompletionInputMetaData(parser.text(), Collections.emptyMap(), 1));
 } else if (token == Token.START_OBJECT) {
 Set<String> inputs = new HashSet<>();
 int weight = 1;
@@ -561,7 +583,7 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
 }
 for (String input : inputs) {
 if (inputMap.containsKey(input) == false || inputMap.get(input).weight < weight) {
-inputMap.put(input, new CompletionInputMetaData(contextsMap, weight));
+inputMap.put(input, new CompletionInputMetaData(input, contextsMap, weight));
 }
 }
 } else {
@@ -570,13 +592,20 @@ public class CompletionFieldMapper extends FieldMapper implements ArrayValueMapp
 }

 static class CompletionInputMetaData {
+public final String input;
 public final Map<String, Set<CharSequence>> contexts;
 public final int weight;

-CompletionInputMetaData(Map<String, Set<CharSequence>> contexts, int weight) {
+CompletionInputMetaData(String input, Map<String, Set<CharSequence>> contexts, int weight) {
+this.input = input;
 this.contexts = contexts;
 this.weight = weight;
 }
+
+@Override
+public String toString() {
+return input;
+}
 }

 @Override
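CompletionInputMetaData now carries the input string itself, and its toString() returns it; that is what lets multiFields.parse hand each suggestion input to sub-fields as an external value, whether the sub-field understands the metadata object or only its string form. A standalone copy of the holder (InputMetaDataDemo is a hypothetical wrapper class):

    import java.util.Collections;
    import java.util.Map;
    import java.util.Set;

    public class InputMetaDataDemo {
        // Same fields and toString() as the extended holder in the diff.
        static class CompletionInputMetaData {
            public final String input;
            public final Map<String, Set<CharSequence>> contexts;
            public final int weight;

            CompletionInputMetaData(String input, Map<String, Set<CharSequence>> contexts, int weight) {
                this.input = input;
                this.contexts = contexts;
                this.weight = weight;
            }

            @Override
            public String toString() {
                return input; // plain-text consumers of the external value see the input
            }
        }

        public static void main(String[] args) {
            CompletionInputMetaData meta = new CompletionInputMetaData("starbucks", Collections.emptyMap(), 3);
            System.out.println(meta); // starbucks
        }
    }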
@@ -67,7 +67,9 @@ public class MultiMatchQuery extends MatchQuery {
 }

 public Query parse(MultiMatchQueryBuilder.Type type, Map<String, Float> fieldNames, Object value, String minimumShouldMatch) throws IOException {
-Query result;
+final Query result;
+// reset query builder
+queryBuilder = null;
 if (fieldNames.size() == 1) {
 Map.Entry<String, Float> fieldBoost = fieldNames.entrySet().iterator().next();
 Float boostValue = fieldBoost.getValue();
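Nulling out queryBuilder at the top of parse guards against state cached by one invocation leaking into the next. A contrived sketch of that failure mode (StaleStateDemo and its field are hypothetical stand-ins for the real builder):

    public class StaleStateDemo {
        private String queryBuilder;

        String parse(boolean buildsQueryBuilder) {
            queryBuilder = null; // reset per call, as the diff now does
            if (buildsQueryBuilder) {
                queryBuilder = "builder";
            }
            return queryBuilder;
        }

        public static void main(String[] args) {
            StaleStateDemo demo = new StaleStateDemo();
            demo.parse(true);
            // without the per-call reset, this second call would still
            // observe the builder created by the first call
            System.out.println(demo.parse(false)); // null
        }
    }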
@@ -19,6 +19,7 @@

 package org.elasticsearch.index.search;

+import org.apache.lucene.index.PrefixCodedTerms;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.BoostQuery;
@@ -28,6 +29,7 @@ import org.apache.lucene.search.MatchAllDocsQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.PointRangeQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.elasticsearch.index.mapper.MapperService;
@@ -54,9 +56,18 @@ public final class NestedHelper {
 } else if (query instanceof MatchNoDocsQuery) {
 return false;
 } else if (query instanceof TermQuery) {
-// We only handle term queries and range queries, which should already
+// We only handle term(s) queries and range queries, which should already
 // cover a high majority of use-cases
 return mightMatchNestedDocs(((TermQuery) query).getTerm().field());
+} else if (query instanceof TermInSetQuery) {
+PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData();
+if (terms.size() > 0) {
+PrefixCodedTerms.TermIterator it = terms.iterator();
+it.next();
+return mightMatchNestedDocs(it.field());
+} else {
+return false;
+}
 } else if (query instanceof PointRangeQuery) {
 return mightMatchNestedDocs(((PointRangeQuery) query).getField());
 } else if (query instanceof IndexOrDocValuesQuery) {
@@ -118,6 +129,15 @@ public final class NestedHelper {
 return false;
 } else if (query instanceof TermQuery) {
 return mightMatchNonNestedDocs(((TermQuery) query).getTerm().field(), nestedPath);
+} else if (query instanceof TermInSetQuery) {
+PrefixCodedTerms terms = ((TermInSetQuery) query).getTermData();
+if (terms.size() > 0) {
+PrefixCodedTerms.TermIterator it = terms.iterator();
+it.next();
+return mightMatchNonNestedDocs(it.field(), nestedPath);
+} else {
+return false;
+}
 } else if (query instanceof PointRangeQuery) {
 return mightMatchNonNestedDocs(((PointRangeQuery) query).getField(), nestedPath);
 } else if (query instanceof IndexOrDocValuesQuery) {
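Every term in a TermInSetQuery targets the same field, so the new branches only need to read the first term to learn which field the query touches. A runnable sketch against the Lucene 7 line used by this branch, where getTermData() is still public:

    import java.util.Arrays;

    import org.apache.lucene.index.PrefixCodedTerms;
    import org.apache.lucene.search.TermInSetQuery;
    import org.apache.lucene.util.BytesRef;

    public class TermInSetFieldDemo {
        public static void main(String[] args) {
            TermInSetQuery query = new TermInSetQuery("user",
                Arrays.asList(new BytesRef("alice"), new BytesRef("bob")));
            PrefixCodedTerms terms = query.getTermData();
            if (terms.size() > 0) {
                PrefixCodedTerms.TermIterator it = terms.iterator();
                it.next(); // position on the first term; field() is valid afterwards
                System.out.println(it.field()); // user
            }
        }
    }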
@@ -0,0 +1,62 @@
+/*
+* Licensed to Elasticsearch under one or more contributor
+* license agreements. See the NOTICE file distributed with
+* this work for additional information regarding copyright
+* ownership. Elasticsearch licenses this file to you under
+* the Apache License, Version 2.0 (the "License"); you may
+* not use this file except in compliance with the License.
+* You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing,
+* software distributed under the License is distributed on an
+* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+* KIND, either express or implied. See the License for the
+* specific language governing permissions and limitations
+* under the License.
+*/
+package org.elasticsearch.cluster;
+
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Arrays;
+import java.util.Collections;
+
+import static org.hamcrest.Matchers.equalTo;
+
+public class ClusterStateTaskExecutorTests extends ESTestCase {
+
+private class TestTask {
+private final String description;
+
+TestTask(String description) {
+this.description = description;
+}
+
+@Override
+public String toString() {
+return description == null ? "" : "Task{" + description + "}";
+}
+}
+
+public void testDescribeTasks() {
+final ClusterStateTaskExecutor<TestTask> executor = (currentState, tasks) -> {
+throw new AssertionError("should not be called");
+};
+
+assertThat("describes an empty list", executor.describeTasks(Collections.emptyList()), equalTo(""));
+assertThat("describes a singleton list", executor.describeTasks(Collections.singletonList(new TestTask("a task"))),
+equalTo("Task{a task}"));
+assertThat("describes a list of two tasks",
+executor.describeTasks(Arrays.asList(new TestTask("a task"), new TestTask("another task"))),
+equalTo("Task{a task}, Task{another task}"));
+
+assertThat("skips the only item if it has no description", executor.describeTasks(Collections.singletonList(new TestTask(null))),
+equalTo(""));
+assertThat("skips an item if it has no description",
+executor.describeTasks(Arrays.asList(
+new TestTask("a task"), new TestTask(null), new TestTask("another task"), new TestTask(null))),
+equalTo("Task{a task}, Task{another task}"));
+}
+}
@@ -23,38 +23,23 @@ import org.elasticsearch.test.ESTestCase;

 import java.time.Instant;
 import java.time.ZoneId;
-import java.time.ZonedDateTime;
 import java.time.format.DateTimeParseException;
 import java.time.temporal.TemporalAccessor;
+import java.util.Locale;

 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;
+import static org.hamcrest.Matchers.sameInstance;

 public class DateFormattersTests extends ESTestCase {

 public void testEpochMilliParser() {
 DateFormatter formatter = DateFormatters.forPattern("epoch_millis");

 DateTimeParseException e = expectThrows(DateTimeParseException.class, () -> formatter.parse("invalid"));
 assertThat(e.getMessage(), containsString("invalid number"));
-
-// different zone, should still yield the same output, as epoch is time zone independent
-ZoneId zoneId = randomZone();
-DateFormatter zonedFormatter = formatter.withZone(zoneId);
-
-// test with negative and non negative values
-assertThatSameDateTime(formatter, zonedFormatter, randomNonNegativeLong() * -1);
-assertThatSameDateTime(formatter, zonedFormatter, randomNonNegativeLong());
-assertThatSameDateTime(formatter, zonedFormatter, 0);
-assertThatSameDateTime(formatter, zonedFormatter, -1);
-assertThatSameDateTime(formatter, zonedFormatter, 1);
-
-// format() output should be equal as well
-assertSameFormat(formatter, randomNonNegativeLong() * -1);
-assertSameFormat(formatter, randomNonNegativeLong());
-assertSameFormat(formatter, 0);
-assertSameFormat(formatter, -1);
-assertSameFormat(formatter, 1);
 }

 // this is not in the duelling tests, because the epoch second parser in joda time drops the milliseconds after the comma
@@ -83,14 +68,6 @@ public class DateFormattersTests extends ESTestCase {
 assertThat(e.getMessage(), is("invalid number [abc]"));
 e = expectThrows(DateTimeParseException.class, () -> formatter.parse("1234.abc"));
 assertThat(e.getMessage(), is("invalid number [1234.abc]"));
-
-// different zone, should still yield the same output, as epoch is time zone independent
-ZoneId zoneId = randomZone();
-DateFormatter zonedFormatter = formatter.withZone(zoneId);
-
-assertThatSameDateTime(formatter, zonedFormatter, randomLongBetween(-100_000_000, 100_000_000));
-assertSameFormat(formatter, randomLongBetween(-100_000_000, 100_000_000));
-assertThat(formatter.format(Instant.ofEpochSecond(1234, 567_000_000)), is("1234.567"));
 }

 public void testEpochMilliParsersWithDifferentFormatters() {
@@ -100,16 +77,54 @@ public class DateFormattersTests extends ESTestCase {
 assertThat(formatter.pattern(), is("strict_date_optional_time||epoch_millis"));
 }

-private void assertThatSameDateTime(DateFormatter formatter, DateFormatter zonedFormatter, long millis) {
-String millisAsString = String.valueOf(millis);
-ZonedDateTime formatterZonedDateTime = DateFormatters.toZonedDateTime(formatter.parse(millisAsString));
-ZonedDateTime zonedFormatterZonedDateTime = DateFormatters.toZonedDateTime(zonedFormatter.parse(millisAsString));
-assertThat(formatterZonedDateTime.toInstant().toEpochMilli(), is(zonedFormatterZonedDateTime.toInstant().toEpochMilli()));
+public void testLocales() {
+assertThat(DateFormatters.forPattern("strict_date_optional_time").getLocale(), is(Locale.ROOT));
+Locale locale = randomLocale(random());
+assertThat(DateFormatters.forPattern("strict_date_optional_time").withLocale(locale).getLocale(), is(locale));
+IllegalArgumentException e =
+expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_millis").withLocale(locale));
+assertThat(e.getMessage(), is("epoch_millis date formatter can only be in locale ROOT"));
+e = expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_second").withLocale(locale));
+assertThat(e.getMessage(), is("epoch_second date formatter can only be in locale ROOT"));
 }

-private void assertSameFormat(DateFormatter formatter, long millis) {
-String millisAsString = String.valueOf(millis);
-TemporalAccessor accessor = formatter.parse(millisAsString);
-assertThat(millisAsString, is(formatter.format(accessor)));
+public void testTimeZones() {
+// zone is null by default due to different behaviours between java8 and above
+assertThat(DateFormatters.forPattern("strict_date_optional_time").getZone(), is(nullValue()));
+ZoneId zoneId = randomZone();
+assertThat(DateFormatters.forPattern("strict_date_optional_time").withZone(zoneId).getZone(), is(zoneId));
+IllegalArgumentException e =
+expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_millis").withZone(zoneId));
+assertThat(e.getMessage(), is("epoch_millis date formatter can only be in zone offset UTC"));
+e = expectThrows(IllegalArgumentException.class, () -> DateFormatters.forPattern("epoch_second").withZone(zoneId));
+assertThat(e.getMessage(), is("epoch_second date formatter can only be in zone offset UTC"));
+}
+
+public void testEqualsAndHashcode() {
+assertThat(DateFormatters.forPattern("strict_date_optional_time"),
+sameInstance(DateFormatters.forPattern("strict_date_optional_time")));
+assertThat(DateFormatters.forPattern("YYYY"), equalTo(DateFormatters.forPattern("YYYY")));
+assertThat(DateFormatters.forPattern("YYYY").hashCode(),
+is(DateFormatters.forPattern("YYYY").hashCode()));
+
+// different timezone, thus not equals
+assertThat(DateFormatters.forPattern("YYYY").withZone(ZoneId.of("CET")), not(equalTo(DateFormatters.forPattern("YYYY"))));
+
+// different locale, thus not equals
+assertThat(DateFormatters.forPattern("YYYY").withLocale(randomLocale(random())),
+not(equalTo(DateFormatters.forPattern("YYYY"))));
+
+// different pattern, thus not equals
+assertThat(DateFormatters.forPattern("YYYY"), not(equalTo(DateFormatters.forPattern("YY"))));
+
+DateFormatter epochSecondFormatter = DateFormatters.forPattern("epoch_second");
+assertThat(epochSecondFormatter, sameInstance(DateFormatters.forPattern("epoch_second")));
+assertThat(epochSecondFormatter, equalTo(DateFormatters.forPattern("epoch_second")));
+assertThat(epochSecondFormatter.hashCode(), is(DateFormatters.forPattern("epoch_second").hashCode()));
+
+DateFormatter epochMillisFormatter = DateFormatters.forPattern("epoch_millis");
+assertThat(epochMillisFormatter.hashCode(), is(DateFormatters.forPattern("epoch_millis").hashCode()));
+assertThat(epochMillisFormatter, sameInstance(DateFormatters.forPattern("epoch_millis")));
+assertThat(epochMillisFormatter, equalTo(DateFormatters.forPattern("epoch_millis")));
 }
 }
@@ -18,9 +18,11 @@
 */
 package org.elasticsearch.index.mapper;

+import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexableField;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.suggest.document.CompletionAnalyzer;
+import org.apache.lucene.search.suggest.document.ContextSuggestField;
 import org.apache.lucene.search.suggest.document.FuzzyCompletionQuery;
 import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
 import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
@@ -42,11 +44,18 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.analysis.NamedAnalyzer;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.hamcrest.FeatureMatcher;
+import org.hamcrest.Matcher;
+import org.hamcrest.Matchers;
+import org.hamcrest.core.CombinableMatcher;

 import java.io.IOException;
 import java.util.Map;
+import java.util.function.Function;

 import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+import static org.hamcrest.Matchers.arrayContainingInAnyOrder;
+import static org.hamcrest.Matchers.arrayWithSize;
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.instanceOf;
@@ -182,6 +191,328 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 assertEquals("failed to parse [completion]: expected text or object, but got VALUE_NUMBER", e.getCause().getMessage());
 }

+public void testKeywordWithSubCompletionAndContext() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties")
+.startObject("keywordfield")
+.field("type", "keyword")
+.startObject("fields")
+.startObject("subsuggest")
+.field("type", "completion")
+.startArray("contexts")
+.startObject()
+.field("name","place_type")
+.field("type","category")
+.field("path","cat")
+.endObject()
+.endArray()
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.array("keywordfield", "key1", "key2", "key3")
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+
+assertThat(indexableFields.getFields("keywordfield"), arrayContainingInAnyOrder(
+keywordField("key1"),
+sortedSetDocValuesField("key1"),
+keywordField("key2"),
+sortedSetDocValuesField("key2"),
+keywordField("key3"),
+sortedSetDocValuesField("key3")
+));
+assertThat(indexableFields.getFields("keywordfield.subsuggest"), arrayContainingInAnyOrder(
+contextSuggestField("key1"),
+contextSuggestField("key2"),
+contextSuggestField("key3")
+));
+}
+
+public void testCompletionWithContextAndSubCompletion() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties")
+.startObject("suggest")
+.field("type", "completion")
+.startArray("contexts")
+.startObject()
+.field("name","place_type")
+.field("type","category")
+.field("path","cat")
+.endObject()
+.endArray()
+.startObject("fields")
+.startObject("subsuggest")
+.field("type", "completion")
+.startArray("contexts")
+.startObject()
+.field("name","place_type")
+.field("type","category")
+.field("path","cat")
+.endObject()
+.endArray()
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.startObject("suggest")
+.array("input","timmy","starbucks")
+.startObject("contexts")
+.array("place_type","cafe","food")
+.endObject()
+.field("weight", 3)
+.endObject()
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+assertThat(indexableFields.getFields("suggest"), arrayContainingInAnyOrder(
+contextSuggestField("timmy"),
+contextSuggestField("starbucks")
+));
+assertThat(indexableFields.getFields("suggest.subsuggest"), arrayContainingInAnyOrder(
+contextSuggestField("timmy"),
+contextSuggestField("starbucks")
+));
+//unable to assert about context, covered in a REST test
+}
+
+public void testCompletionWithContextAndSubCompletionIndexByPath() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties")
+.startObject("suggest")
+.field("type", "completion")
+.startArray("contexts")
+.startObject()
+.field("name","place_type")
+.field("type","category")
+.field("path","cat")
+.endObject()
+.endArray()
+.startObject("fields")
+.startObject("subsuggest")
+.field("type", "completion")
+.startArray("contexts")
+.startObject()
+.field("name","place_type")
+.field("type","category")
+.field("path","cat")
+.endObject()
+.endArray()
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.array("suggest", "timmy","starbucks")
+.array("cat","cafe","food")
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+assertThat(indexableFields.getFields("suggest"), arrayContainingInAnyOrder(
+contextSuggestField("timmy"),
+contextSuggestField("starbucks")
+));
+assertThat(indexableFields.getFields("suggest.subsuggest"), arrayContainingInAnyOrder(
+contextSuggestField("timmy"),
+contextSuggestField("starbucks")
+));
+//unable to assert about context, covered in a REST test
+}
+
+
+public void testKeywordWithSubCompletionAndStringInsert() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties").startObject("geofield")
+.field("type", "geo_point")
+.startObject("fields")
+.startObject("analyzed")
+.field("type", "completion")
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.field("geofield", "drm3btev3e86")//"41.12,-71.34"
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+assertThat(indexableFields.getFields("geofield"), arrayWithSize(2));
+assertThat(indexableFields.getFields("geofield.analyzed"), arrayContainingInAnyOrder(
+suggestField("drm3btev3e86")
+));
+//unable to assert about geofield content, covered in a REST test
+}
+
+public void testCompletionTypeWithSubCompletionFieldAndStringInsert() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties").startObject("suggest")
+.field("type", "completion")
+.startObject("fields")
+.startObject("subsuggest")
+.field("type", "completion")
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.field("suggest", "suggestion")
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+assertThat(indexableFields.getFields("suggest"), arrayContainingInAnyOrder(
+suggestField("suggestion")
+));
+assertThat(indexableFields.getFields("suggest.subsuggest"), arrayContainingInAnyOrder(
+suggestField("suggestion")
+));
+}
+
+public void testCompletionTypeWithSubCompletionFieldAndObjectInsert() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties").startObject("completion")
+.field("type", "completion")
+.startObject("fields")
+.startObject("analyzed")
+.field("type", "completion")
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.startObject("completion")
+.array("input","New York", "NY")
+.field("weight",34)
+.endObject()
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+assertThat(indexableFields.getFields("completion"), arrayContainingInAnyOrder(
+suggestField("New York"),
+suggestField("NY")
+));
+assertThat(indexableFields.getFields("completion.analyzed"), arrayContainingInAnyOrder(
+suggestField("New York"),
+suggestField("NY")
+));
+//unable to assert about weight, covered in a REST test
+}
+
+public void testCompletionTypeWithSubKeywordFieldAndObjectInsert() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties").startObject("completion")
+.field("type", "completion")
+.startObject("fields")
+.startObject("analyzed")
+.field("type", "keyword")
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.startObject("completion")
+.array("input","New York", "NY")
+.field("weight",34)
+.endObject()
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+assertThat(indexableFields.getFields("completion"), arrayContainingInAnyOrder(
+suggestField("New York"),
+suggestField("NY")
+));
+assertThat(indexableFields.getFields("completion.analyzed"), arrayContainingInAnyOrder(
+keywordField("New York"),
+sortedSetDocValuesField("New York"),
+keywordField("NY"),
+sortedSetDocValuesField("NY")
+));
+//unable to assert about weight, covered in a REST test
+}
+
+public void testCompletionTypeWithSubKeywordFieldAndStringInsert() throws Exception {
+String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
+.startObject("properties").startObject("completion")
+.field("type", "completion")
+.startObject("fields")
+.startObject("analyzed")
+.field("type", "keyword")
+.endObject()
+.endObject()
+.endObject().endObject()
+.endObject().endObject()
+);
+
+DocumentMapper defaultMapper = createIndex("test").mapperService().documentMapperParser().parse("type1", new CompressedXContent(mapping));
+
+ParsedDocument parsedDocument = defaultMapper.parse(SourceToParse.source("test", "type1", "1", BytesReference
+.bytes(XContentFactory.jsonBuilder()
+.startObject()
+.field("completion", "suggestion")
+.endObject()),
+XContentType.JSON));
+
+ParseContext.Document indexableFields = parsedDocument.rootDoc();
+assertThat(indexableFields.getFields("completion"), arrayContainingInAnyOrder(
+suggestField("suggestion")
+));
+assertThat(indexableFields.getFields("completion.analyzed"), arrayContainingInAnyOrder(
+keywordField("suggestion"),
+sortedSetDocValuesField("suggestion")
+));
+}
+
 public void testParsingMultiValued() throws Exception {
 String mapping = Strings.toString(jsonBuilder().startObject().startObject("type1")
 .startObject("properties").startObject("completion")
@@ -199,7 +530,10 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()),
 XContentType.JSON));
 IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-assertSuggestFields(fields, 2);
+assertThat(fields, arrayContainingInAnyOrder(
+suggestField("suggestion1"),
+suggestField("suggestion2")
+));
 }

 public void testParsingWithWeight() throws Exception {
@@ -222,7 +556,9 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()),
 XContentType.JSON));
 IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-assertSuggestFields(fields, 1);
+assertThat(fields, arrayContainingInAnyOrder(
+suggestField("suggestion")
+));
 }

 public void testParsingMultiValueWithWeight() throws Exception {
@@ -245,7 +581,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()),
 XContentType.JSON));
 IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-assertSuggestFields(fields, 3);
+assertThat(fields, arrayContainingInAnyOrder(
+suggestField("suggestion1"),
+suggestField("suggestion2"),
+suggestField("suggestion3")
+));
 }

 public void testParsingWithGeoFieldAlias() throws Exception {
@@ -318,7 +658,11 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()),
 XContentType.JSON));
 IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-assertSuggestFields(fields, 3);
+assertThat(fields, arrayContainingInAnyOrder(
+suggestField("suggestion1"),
+suggestField("suggestion2"),
+suggestField("suggestion3")
+));
 }

 public void testParsingMixed() throws Exception {
@@ -351,7 +695,14 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 .endObject()),
 XContentType.JSON));
 IndexableField[] fields = parsedDocument.rootDoc().getFields(fieldMapper.name());
-assertSuggestFields(fields, 6);
+assertThat(fields, arrayContainingInAnyOrder(
+suggestField("suggestion1"),
+suggestField("suggestion2"),
+suggestField("suggestion3"),
+suggestField("suggestion4"),
+suggestField("suggestion5"),
+suggestField("suggestion6")
+));
 }

 public void testNonContextEnabledParsingWithContexts() throws Exception {
@@ -508,9 +859,13 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 }

 private static void assertSuggestFields(IndexableField[] fields, int expected) {
+assertFieldsOfType(fields, SuggestField.class, expected);
+}
+
+private static void assertFieldsOfType(IndexableField[] fields, Class<?> clazz, int expected) {
 int actualFieldCount = 0;
 for (IndexableField field : fields) {
-if (field instanceof SuggestField) {
+if (clazz.isInstance(field)) {
 actualFieldCount++;
 }
 }
@@ -529,4 +884,33 @@ public class CompletionFieldMapperTests extends ESSingleNodeTestCase {
 );
 assertThat(e.getMessage(), containsString("name cannot be empty string"));
 }
+
+private Matcher<IndexableField> suggestField(String value) {
+return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)),
+Matchers.instanceOf(SuggestField.class));
+}
+
+private Matcher<IndexableField> contextSuggestField(String value) {
+return Matchers.allOf(hasProperty(IndexableField::stringValue, equalTo(value)),
+Matchers.instanceOf(ContextSuggestField.class));
+}
+
+private CombinableMatcher<IndexableField> sortedSetDocValuesField(String value) {
+return Matchers.both(hasProperty(IndexableField::binaryValue, equalTo(new BytesRef(value))))
+.and(Matchers.instanceOf(SortedSetDocValuesField.class));
+}
+
+private CombinableMatcher<IndexableField> keywordField(String value) {
+return Matchers.both(hasProperty(IndexableField::binaryValue, equalTo(new BytesRef(value))))
+.and(hasProperty(IndexableField::fieldType, Matchers.instanceOf(KeywordFieldMapper.KeywordFieldType.class)));
+}
+
+private <T, V> Matcher<T> hasProperty(Function<? super T, ? extends V> property, Matcher<V> valueMatcher) {
+return new FeatureMatcher<T, V>(valueMatcher, "object with", property.toString()) {
+@Override
+protected V featureValueOf(T actual) {
+return property.apply(actual);
+}
+};
+}
 }
@@ -21,6 +21,7 @@ package org.elasticsearch.index.query;
 
 import org.apache.lucene.analysis.MockSynonymAnalyzer;
 import org.apache.lucene.index.Term;
+import org.apache.lucene.queries.BlendedTermQuery;
 import org.apache.lucene.search.BooleanClause;
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
@@ -1195,20 +1196,23 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
             newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field",
                 STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build())
         );
-        Query query = new QueryStringQueryBuilder("hello")
-            .toQuery(context);
-        Query expected = new DisjunctionMaxQuery(
-            Arrays.asList(
-                new TermQuery(new Term(STRING_FIELD_NAME, "hello")),
-                new BoostQuery(new TermQuery(new Term(STRING_FIELD_NAME_2, "hello")), 5.0f)
-            ), 0.0f
-        );
-        assertEquals(expected, query);
-        // Reset the default value
-        context.getIndexSettings().updateIndexMetaData(
-            newIndexMeta("index",
-                context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
-        );
+        try {
+            Query query = new QueryStringQueryBuilder("hello")
+                .toQuery(context);
+            Query expected = new DisjunctionMaxQuery(
+                Arrays.asList(
+                    new TermQuery(new Term(STRING_FIELD_NAME, "hello")),
+                    new BoostQuery(new TermQuery(new Term(STRING_FIELD_NAME_2, "hello")), 5.0f)
+                ), 0.0f
+            );
+            assertEquals(expected, query);
+        } finally {
+            // Reset the default value
+            context.getIndexSettings().updateIndexMetaData(
+                newIndexMeta("index",
+                    context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
+            );
+        }
     }
 
     /**
@@ -1345,6 +1349,44 @@ public class QueryStringQueryBuilderTests extends AbstractQueryTestCase<QueryStr
         assertEquals(expected, query);
     }
 
+    public void testCrossFields() throws Exception {
+        final QueryShardContext context = createShardContext();
+        context.getIndexSettings().updateIndexMetaData(
+            newIndexMeta("index", context.getIndexSettings().getSettings(),
+                Settings.builder().putList("index.query.default_field",
+                    STRING_FIELD_NAME, STRING_FIELD_NAME_2).build())
+        );
+        try {
+            Term[] blendedTerms = new Term[2];
+            blendedTerms[0] = new Term(STRING_FIELD_NAME, "foo");
+            blendedTerms[1] = new Term(STRING_FIELD_NAME_2, "foo");
+
+            Query query = new QueryStringQueryBuilder("foo")
+                .analyzer("whitespace")
+                .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                .toQuery(createShardContext());
+            Query expected = BlendedTermQuery.dismaxBlendedQuery(blendedTerms, 1.0f);
+            assertEquals(expected, query);
+
+            query = new QueryStringQueryBuilder("foo mapped_string:10")
+                .analyzer("whitespace")
+                .type(MultiMatchQueryBuilder.Type.CROSS_FIELDS)
+                .toQuery(createShardContext());
+            expected = new BooleanQuery.Builder()
+                .add(BlendedTermQuery.dismaxBlendedQuery(blendedTerms, 1.0f), Occur.SHOULD)
+                .add(new TermQuery(new Term(STRING_FIELD_NAME, "10")), Occur.SHOULD)
+                .build();
+            assertEquals(expected, query);
+        } finally {
+            // Reset the default value
+            context.getIndexSettings().updateIndexMetaData(
+                newIndexMeta("index",
+                    context.getIndexSettings().getSettings(),
+                    Settings.builder().putList("index.query.default_field", "*").build())
+            );
+        }
+    }
+
     private static IndexMetaData newIndexMeta(String name, Settings oldIndexSettings, Settings indexSettings) {
         Settings build = Settings.builder().put(oldIndexSettings)
             .put(indexSettings)
@@ -40,6 +40,7 @@ import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.test.ESSingleNodeTestCase;
 
 import java.io.IOException;
+import java.util.Collections;
 
 public class NestedHelperTests extends ESSingleNodeTestCase {
 
@@ -115,6 +116,36 @@ public class NestedHelperTests extends ESSingleNodeTestCase {
         assertFalse(new NestedHelper(mapperService).mightMatchNonNestedDocs(new MatchNoDocsQuery(), "nested_missing"));
     }
 
+    public void testTermsQuery() {
+        Query termsQuery = mapperService.fullName("foo").termsQuery(Collections.singletonList("bar"), null);
+        assertFalse(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing"));
+
+        termsQuery = mapperService.fullName("nested1.foo").termsQuery(Collections.singletonList("bar"), null);
+        assertTrue(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery));
+        assertFalse(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing"));
+
+        termsQuery = mapperService.fullName("nested2.foo").termsQuery(Collections.singletonList("bar"), null);
+        assertTrue(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing"));
+
+        termsQuery = mapperService.fullName("nested3.foo").termsQuery(Collections.singletonList("bar"), null);
+        assertTrue(new NestedHelper(mapperService).mightMatchNestedDocs(termsQuery));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested1"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested2"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested3"));
+        assertTrue(new NestedHelper(mapperService).mightMatchNonNestedDocs(termsQuery, "nested_missing"));
+    }
+
     public void testTermQuery() {
         Query termQuery = mapperService.fullName("foo").termQuery("bar", null);
         assertFalse(new NestedHelper(mapperService).mightMatchNestedDocs(termQuery));
@@ -20,6 +20,7 @@
 package org.elasticsearch.test;
 
 import org.elasticsearch.common.CheckedBiFunction;
+import org.elasticsearch.common.CheckedBiConsumer;
 import org.elasticsearch.common.CheckedFunction;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -38,34 +39,147 @@ import java.util.function.Supplier;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
 
+
 public abstract class AbstractXContentTestCase<T extends ToXContent> extends ESTestCase {
     protected static final int NUMBER_OF_TEST_RUNS = 20;
 
-    public static <T extends ToXContent> void testFromXContent(int numberOfTestRuns, Supplier<T> instanceSupplier,
-            boolean supportsUnknownFields, String[] shuffleFieldsExceptions,
-            Predicate<String> randomFieldsExcludeFilter,
-            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException>
-                    createParserFunction,
-            CheckedFunction<XContentParser, T, IOException> parseFunction,
-            BiConsumer<T, T> assertEqualsConsumer,
-            boolean assertToXContentEquivalence,
-            ToXContent.Params toXContentParams) throws IOException {
-        for (int runs = 0; runs < numberOfTestRuns; runs++) {
-            T testInstance = instanceSupplier.get();
-            XContentType xContentType = randomFrom(XContentType.values());
-            BytesReference shuffledContent = insertRandomFieldsAndShuffle(testInstance, xContentType, supportsUnknownFields,
-                    shuffleFieldsExceptions, randomFieldsExcludeFilter, createParserFunction, toXContentParams);
-            XContentParser parser = createParserFunction.apply(XContentFactory.xContent(xContentType), shuffledContent);
-            T parsed = parseFunction.apply(parser);
-            assertEqualsConsumer.accept(testInstance, parsed);
-            if (assertToXContentEquivalence) {
-                assertToXContentEquivalent(
+    public static <T> XContentTester<T> xContentTester(
+            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
+            Supplier<T> instanceSupplier,
+            CheckedBiConsumer<T, XContentBuilder, IOException> toXContent,
+            CheckedFunction<XContentParser, T, IOException> fromXContent) {
+        return new XContentTester<T>(
+                createParser,
+                instanceSupplier,
+                (testInstance, xContentType) -> {
+                    try (XContentBuilder builder = XContentBuilder.builder(xContentType.xContent())) {
+                        toXContent.accept(testInstance, builder);
+                        return BytesReference.bytes(builder);
+                    }
+                },
+                fromXContent);
+    }
+
+    public static <T extends ToXContent> XContentTester<T> xContentTester(
+            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
+            Supplier<T> instanceSupplier,
+            CheckedFunction<XContentParser, T, IOException> fromXContent) {
+        return xContentTester(createParser, instanceSupplier, ToXContent.EMPTY_PARAMS, fromXContent);
+    }
+
+    public static <T extends ToXContent> XContentTester<T> xContentTester(
+            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
+            Supplier<T> instanceSupplier,
+            ToXContent.Params toXContentParams,
+            CheckedFunction<XContentParser, T, IOException> fromXContent) {
+        return new XContentTester<T>(
+                createParser,
+                instanceSupplier,
+                (testInstance, xContentType) ->
                     XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false),
-                    XContentHelper.toXContent(parsed, xContentType, toXContentParams, false),
-                    xContentType);
+                fromXContent);
+    }
+
+    /**
+     * Tests converting to and from xcontent.
+     */
+    public static class XContentTester<T> {
+        private final CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser;
+        private final Supplier<T> instanceSupplier;
+        private final CheckedBiFunction<T, XContentType, BytesReference, IOException> toXContent;
+        private final CheckedFunction<XContentParser, T, IOException> fromXContent;
+
+        private int numberOfTestRuns = NUMBER_OF_TEST_RUNS;
+        private boolean supportsUnknownFields = false;
+        private String[] shuffleFieldsExceptions = Strings.EMPTY_ARRAY;
+        private Predicate<String> randomFieldsExcludeFilter = field -> false;
+        private BiConsumer<T, T> assertEqualsConsumer = (expectedInstance, newInstance) -> {
+            assertNotSame(newInstance, expectedInstance);
+            assertEquals(expectedInstance, newInstance);
+            assertEquals(expectedInstance.hashCode(), newInstance.hashCode());
+        };
+        private boolean assertToXContentEquivalence = true;
+
+        private XContentTester(
+                CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParser,
+                Supplier<T> instanceSupplier,
+                CheckedBiFunction<T, XContentType, BytesReference, IOException> toXContent,
+                CheckedFunction<XContentParser, T, IOException> fromXContent) {
+            this.createParser = createParser;
+            this.instanceSupplier = instanceSupplier;
+            this.toXContent = toXContent;
+            this.fromXContent = fromXContent;
+        }
+
+        public void test() throws IOException {
+            for (int runs = 0; runs < numberOfTestRuns; runs++) {
+                T testInstance = instanceSupplier.get();
+                XContentType xContentType = randomFrom(XContentType.values());
+                BytesReference originalXContent = toXContent.apply(testInstance, xContentType);
+                BytesReference shuffledContent = insertRandomFieldsAndShuffle(originalXContent, xContentType, supportsUnknownFields,
+                        shuffleFieldsExceptions, randomFieldsExcludeFilter, createParser);
+                XContentParser parser = createParser.apply(XContentFactory.xContent(xContentType), shuffledContent);
+                T parsed = fromXContent.apply(parser);
+                assertEqualsConsumer.accept(testInstance, parsed);
+                if (assertToXContentEquivalence) {
+                    assertToXContentEquivalent(
+                            toXContent.apply(testInstance, xContentType),
+                            toXContent.apply(parsed, xContentType),
+                            xContentType);
+                }
             }
         }
+
+        public XContentTester<T> numberOfTestRuns(int numberOfTestRuns) {
+            this.numberOfTestRuns = numberOfTestRuns;
+            return this;
+        }
+
+        public XContentTester<T> supportsUnknownFields(boolean supportsUnknownFields) {
+            this.supportsUnknownFields = supportsUnknownFields;
+            return this;
+        }
+
+        public XContentTester<T> shuffleFieldsExceptions(String[] shuffleFieldsExceptions) {
+            this.shuffleFieldsExceptions = shuffleFieldsExceptions;
+            return this;
+        }
+
+        public XContentTester<T> randomFieldsExcludeFilter(Predicate<String> randomFieldsExcludeFilter) {
+            this.randomFieldsExcludeFilter = randomFieldsExcludeFilter;
+            return this;
+        }
+
+        public XContentTester<T> assertEqualsConsumer(BiConsumer<T, T> assertEqualsConsumer) {
+            this.assertEqualsConsumer = assertEqualsConsumer;
+            return this;
+        }
+
+        public XContentTester<T> assertToXContentEquivalence(boolean assertToXContentEquivalence) {
+            this.assertToXContentEquivalence = assertToXContentEquivalence;
+            return this;
+        }
+    }
+
+    public static <T extends ToXContent> void testFromXContent(
+            int numberOfTestRuns,
+            Supplier<T> instanceSupplier,
+            boolean supportsUnknownFields,
+            String[] shuffleFieldsExceptions,
+            Predicate<String> randomFieldsExcludeFilter,
+            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParserFunction,
+            CheckedFunction<XContentParser, T, IOException> fromXContent,
+            BiConsumer<T, T> assertEqualsConsumer,
+            boolean assertToXContentEquivalence,
+            ToXContent.Params toXContentParams) throws IOException {
+        xContentTester(createParserFunction, instanceSupplier, toXContentParams, fromXContent)
+                .numberOfTestRuns(numberOfTestRuns)
+                .supportsUnknownFields(supportsUnknownFields)
+                .shuffleFieldsExceptions(shuffleFieldsExceptions)
+                .randomFieldsExcludeFilter(randomFieldsExcludeFilter)
+                .assertEqualsConsumer(assertEqualsConsumer)
+                .assertToXContentEquivalence(assertToXContentEquivalence)
+                .test();
     }
 
     /**
@@ -133,11 +247,9 @@ public abstract class AbstractXContentTestCase<T extends ToXContent> extends EST
         return ToXContent.EMPTY_PARAMS;
     }
 
-    static BytesReference insertRandomFieldsAndShuffle(ToXContent testInstance, XContentType xContentType,
+    static BytesReference insertRandomFieldsAndShuffle(BytesReference xContent, XContentType xContentType,
             boolean supportsUnknownFields, String[] shuffleFieldsExceptions, Predicate<String> randomFieldsExcludeFilter,
-            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParserFunction,
-            ToXContent.Params toXContentParams) throws IOException {
-        BytesReference xContent = XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false);
+            CheckedBiFunction<XContent, BytesReference, XContentParser, IOException> createParserFunction) throws IOException {
         BytesReference withRandomFields;
         if (supportsUnknownFields) {
             // add a few random fields to check that the parser is lenient on new fields
@@ -22,13 +22,11 @@ package org.elasticsearch.test;
 import com.carrotsearch.randomizedtesting.RandomizedContext;
 
 import org.elasticsearch.common.bytes.BytesReference;
-import org.elasticsearch.common.xcontent.ToXContent;
-import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
 
-import java.io.IOException;
 import java.util.Map;
 
 import static org.hamcrest.Matchers.equalTo;
@@ -37,29 +35,24 @@ import static org.hamcrest.Matchers.not;
 public class AbstractXContentTestCaseTests extends ESTestCase {
 
     public void testInsertRandomFieldsAndShuffle() throws Exception {
-        TestInstance t = new TestInstance();
+        XContentBuilder builder = XContentFactory.jsonBuilder();
+        builder.startObject();
+        {
+            builder.field("field", 1);
+        }
+        builder.endObject();
         BytesReference insertRandomFieldsAndShuffle = RandomizedContext.current().runWithPrivateRandomness(1,
-                () -> AbstractXContentTestCase.insertRandomFieldsAndShuffle(t, XContentType.JSON, true, new String[] {}, null,
-                        this::createParser, ToXContent.EMPTY_PARAMS));
+                () -> AbstractXContentTestCase.insertRandomFieldsAndShuffle(
+                        BytesReference.bytes(builder),
+                        XContentType.JSON,
+                        true,
+                        new String[] {},
+                        null,
+                        this::createParser));
         try (XContentParser parser = createParser(XContentType.JSON.xContent(), insertRandomFieldsAndShuffle)) {
             Map<String, Object> mapOrdered = parser.mapOrdered();
            assertThat(mapOrdered.size(), equalTo(2));
            assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field")));
        }
    }
-
-    private class TestInstance implements ToXContentObject {
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            {
-                builder.field("field", 1);
-            }
-            builder.endObject();
-            return builder;
-        }
-
-    }
-
 }
@@ -148,7 +148,7 @@ public class Ccr extends Plugin implements ActionPlugin, PersistentTaskPlugin, E
     @Override
     public List<PersistentTasksExecutor<?>> getPersistentTasksExecutor(ClusterService clusterService,
                                                                        ThreadPool threadPool, Client client) {
-        return Collections.singletonList(new ShardFollowTasksExecutor(settings, client, threadPool));
+        return Collections.singletonList(new ShardFollowTasksExecutor(settings, client, threadPool, clusterService));
     }
 
     public List<ActionHandler<? extends ActionRequest, ? extends ActionResponse>> getActions() {
@@ -62,6 +62,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
     private final BiConsumer<TimeValue, Runnable> scheduler;
     private final LongSupplier relativeTimeProvider;
 
+    private String followerHistoryUUID;
     private long leaderGlobalCheckpoint;
     private long leaderMaxSeqNo;
     private long leaderMaxSeqNoOfUpdatesOrDeletes = SequenceNumbers.UNASSIGNED_SEQ_NO;
@@ -110,15 +111,17 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
     }
 
     void start(
+            final String followerHistoryUUID,
             final long leaderGlobalCheckpoint,
             final long leaderMaxSeqNo,
             final long followerGlobalCheckpoint,
             final long followerMaxSeqNo) {
         /*
          * While this should only ever be called once and before any other threads can touch these fields, we use synchronization here to
          * avoid the need to declare these fields as volatile. That is, we are ensuring thesefields are always accessed under the same lock.
          */
         synchronized (this) {
+            this.followerHistoryUUID = followerHistoryUUID;
             this.leaderGlobalCheckpoint = leaderGlobalCheckpoint;
             this.leaderMaxSeqNo = leaderMaxSeqNo;
             this.followerGlobalCheckpoint = followerGlobalCheckpoint;
@@ -305,7 +308,7 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
                                               AtomicInteger retryCounter) {
         assert leaderMaxSeqNoOfUpdatesOrDeletes != SequenceNumbers.UNASSIGNED_SEQ_NO : "mus is not replicated";
         final long startTime = relativeTimeProvider.getAsLong();
-        innerSendBulkShardOperationsRequest(operations, leaderMaxSeqNoOfUpdatesOrDeletes,
+        innerSendBulkShardOperationsRequest(followerHistoryUUID, operations, leaderMaxSeqNoOfUpdatesOrDeletes,
                 response -> {
                     synchronized (ShardFollowNodeTask.this) {
                         totalIndexTimeMillis += TimeUnit.NANOSECONDS.toMillis(relativeTimeProvider.getAsLong() - startTime);
@@ -404,8 +407,11 @@ public abstract class ShardFollowNodeTask extends AllocatedPersistentTask {
     // These methods are protected for testing purposes:
     protected abstract void innerUpdateMapping(LongConsumer handler, Consumer<Exception> errorHandler);
 
-    protected abstract void innerSendBulkShardOperationsRequest(List<Translog.Operation> operations, long leaderMaxSeqNoOfUpdatesOrDeletes,
-                                                                Consumer<BulkShardOperationsResponse> handler, Consumer<Exception> errorHandler);
+    protected abstract void innerSendBulkShardOperationsRequest(String followerHistoryUUID,
+                                                                List<Translog.Operation> operations,
+                                                                long leaderMaxSeqNoOfUpdatesOrDeletes,
+                                                                Consumer<BulkShardOperationsResponse> handler,
+                                                                Consumer<Exception> errorHandler);
 
     protected abstract void innerSendShardChangesRequest(long from, int maxOperationCount, Consumer<ShardChangesAction.Response> handler,
                                                          Consumer<Exception> errorHandler);
@@ -51,13 +51,12 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
     public static final ParseField MAX_WRITE_BUFFER_SIZE = new ParseField("max_write_buffer_size");
     public static final ParseField MAX_RETRY_DELAY = new ParseField("max_retry_delay");
     public static final ParseField POLL_TIMEOUT = new ParseField("poll_timeout");
-    public static final ParseField RECORDED_HISTORY_UUID = new ParseField("recorded_history_uuid");
 
     @SuppressWarnings("unchecked")
     private static ConstructingObjectParser<ShardFollowTask, Void> PARSER = new ConstructingObjectParser<>(NAME,
         (a) -> new ShardFollowTask((String) a[0], new ShardId((String) a[1], (String) a[2], (int) a[3]),
             new ShardId((String) a[4], (String) a[5], (int) a[6]), (int) a[7], (int) a[8], (ByteSizeValue) a[9],
-            (int) a[10], (int) a[11], (TimeValue) a[12], (TimeValue) a[13], (String) a[14], (Map<String, String>) a[15]));
+            (int) a[10], (int) a[11], (TimeValue) a[12], (TimeValue) a[13], (Map<String, String>) a[14]));
 
     static {
         PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), LEADER_CLUSTER_ALIAS_FIELD);
@@ -82,7 +81,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
         PARSER.declareField(ConstructingObjectParser.constructorArg(),
             (p, c) -> TimeValue.parseTimeValue(p.text(), POLL_TIMEOUT.getPreferredName()),
             POLL_TIMEOUT, ObjectParser.ValueType.STRING);
-        PARSER.declareString(ConstructingObjectParser.constructorArg(), RECORDED_HISTORY_UUID);
         PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> p.mapStrings(), HEADERS);
     }
 
@@ -96,7 +94,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
     private final int maxWriteBufferSize;
     private final TimeValue maxRetryDelay;
     private final TimeValue pollTimeout;
-    private final String recordedLeaderIndexHistoryUUID;
     private final Map<String, String> headers;
 
     ShardFollowTask(
@@ -110,7 +107,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
             final int maxWriteBufferSize,
             final TimeValue maxRetryDelay,
             final TimeValue pollTimeout,
-            final String recordedLeaderIndexHistoryUUID,
             final Map<String, String> headers) {
         this.leaderClusterAlias = leaderClusterAlias;
         this.followShardId = followShardId;
@@ -122,7 +118,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
         this.maxWriteBufferSize = maxWriteBufferSize;
         this.maxRetryDelay = maxRetryDelay;
         this.pollTimeout = pollTimeout;
-        this.recordedLeaderIndexHistoryUUID = recordedLeaderIndexHistoryUUID;
         this.headers = headers != null ? Collections.unmodifiableMap(headers) : Collections.emptyMap();
     }
 
@@ -137,7 +132,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
         this.maxWriteBufferSize = in.readVInt();
         this.maxRetryDelay = in.readTimeValue();
         this.pollTimeout = in.readTimeValue();
-        this.recordedLeaderIndexHistoryUUID = in.readString();
         this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString));
     }
 
@@ -185,10 +179,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
         return followShardId.getIndex().getUUID() + "-" + followShardId.getId();
     }
 
-    public String getRecordedLeaderIndexHistoryUUID() {
-        return recordedLeaderIndexHistoryUUID;
-    }
-
     public Map<String, String> getHeaders() {
         return headers;
     }
@@ -210,7 +200,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
         out.writeVInt(maxWriteBufferSize);
         out.writeTimeValue(maxRetryDelay);
         out.writeTimeValue(pollTimeout);
-        out.writeString(recordedLeaderIndexHistoryUUID);
         out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString);
     }
 
@@ -237,7 +226,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
         builder.field(MAX_WRITE_BUFFER_SIZE.getPreferredName(), maxWriteBufferSize);
         builder.field(MAX_RETRY_DELAY.getPreferredName(), maxRetryDelay.getStringRep());
         builder.field(POLL_TIMEOUT.getPreferredName(), pollTimeout.getStringRep());
-        builder.field(RECORDED_HISTORY_UUID.getPreferredName(), recordedLeaderIndexHistoryUUID);
         builder.field(HEADERS.getPreferredName(), headers);
         return builder.endObject();
     }
@@ -257,7 +245,6 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
             maxWriteBufferSize == that.maxWriteBufferSize &&
             Objects.equals(maxRetryDelay, that.maxRetryDelay) &&
             Objects.equals(pollTimeout, that.pollTimeout) &&
-            Objects.equals(recordedLeaderIndexHistoryUUID, that.recordedLeaderIndexHistoryUUID) &&
             Objects.equals(headers, that.headers);
     }
 
@@ -274,8 +261,8 @@ public class ShardFollowTask implements XPackPlugin.XPackPersistentTaskParams {
             maxWriteBufferSize,
             maxRetryDelay,
             pollTimeout,
-            recordedLeaderIndexHistoryUUID,
-            headers);
+            headers
+        );
     }
 
     public String toString() {
@@ -17,12 +17,15 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.cluster.metadata.MappingMetaData;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
+import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexNotFoundException;
+import org.elasticsearch.index.engine.CommitStats;
+import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.seqno.SeqNoStats;
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.index.shard.ShardNotFoundException;
@@ -47,16 +50,19 @@ import java.util.function.Consumer;
 import java.util.function.LongConsumer;
 
 import static org.elasticsearch.xpack.ccr.CcrLicenseChecker.wrapClient;
+import static org.elasticsearch.xpack.ccr.action.TransportResumeFollowAction.extractLeaderShardHistoryUUIDs;
 
 public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollowTask> {
 
     private final Client client;
     private final ThreadPool threadPool;
+    private final ClusterService clusterService;
 
-    public ShardFollowTasksExecutor(Settings settings, Client client, ThreadPool threadPool) {
+    public ShardFollowTasksExecutor(Settings settings, Client client, ThreadPool threadPool, ClusterService clusterService) {
         super(settings, ShardFollowTask.NAME, Ccr.CCR_THREAD_POOL_NAME);
         this.client = client;
         this.threadPool = threadPool;
+        this.clusterService = clusterService;
     }
 
     @Override
@@ -99,8 +105,10 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollo
                 }
             }
         };
-        return new ShardFollowNodeTask(
-            id, type, action, getDescription(taskInProgress), parentTaskId, headers, params, scheduler, System::nanoTime) {
+
+        final String recordedLeaderShardHistoryUUID = getLeaderShardHistoryUUID(params);
+        return new ShardFollowNodeTask(id, type, action, getDescription(taskInProgress), parentTaskId, headers, params,
+            scheduler, System::nanoTime) {
 
             @Override
             protected void innerUpdateMapping(LongConsumer handler, Consumer<Exception> errorHandler) {
@@ -135,12 +143,14 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollo
 
             @Override
             protected void innerSendBulkShardOperationsRequest(
+                    final String followerHistoryUUID,
                    final List<Translog.Operation> operations,
                    final long maxSeqNoOfUpdatesOrDeletes,
                    final Consumer<BulkShardOperationsResponse> handler,
                    final Consumer<Exception> errorHandler) {
-                final BulkShardOperationsRequest request = new BulkShardOperationsRequest(
-                    params.getFollowShardId(), operations, maxSeqNoOfUpdatesOrDeletes);
+
+                final BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(),
+                    followerHistoryUUID, operations, maxSeqNoOfUpdatesOrDeletes);
                 followerClient.execute(BulkShardOperationsAction.INSTANCE, request,
                     ActionListener.wrap(response -> handler.accept(response), errorHandler));
             }
@@ -149,7 +159,7 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollo
             protected void innerSendShardChangesRequest(long from, int maxOperationCount, Consumer<ShardChangesAction.Response> handler,
                                                         Consumer<Exception> errorHandler) {
                 ShardChangesAction.Request request =
-                    new ShardChangesAction.Request(params.getLeaderShardId(), params.getRecordedLeaderIndexHistoryUUID());
+                    new ShardChangesAction.Request(params.getLeaderShardId(), recordedLeaderShardHistoryUUID);
                 request.setFromSeqNo(from);
                 request.setMaxOperationCount(maxOperationCount);
                 request.setMaxBatchSize(params.getMaxBatchSize());
@@ -159,8 +169,15 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollo
         };
     }
 
-    interface BiLongConsumer {
-        void accept(long x, long y);
+    private String getLeaderShardHistoryUUID(ShardFollowTask params) {
+        IndexMetaData followIndexMetaData = clusterService.state().metaData().index(params.getFollowShardId().getIndex());
+        Map<String, String> ccrIndexMetadata = followIndexMetaData.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY);
+        String[] recordedLeaderShardHistoryUUIDs = extractLeaderShardHistoryUUIDs(ccrIndexMetadata);
+        return recordedLeaderShardHistoryUUIDs[params.getLeaderShardId().id()];
+    }
+
+    interface FollowerStatsInfoHandler {
+        void accept(String followerHistoryUUID, long globalCheckpoint, long maxSeqNo);
     }
 
     @Override
@@ -169,7 +186,9 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollo
         ShardFollowNodeTask shardFollowNodeTask = (ShardFollowNodeTask) task;
         logger.info("{} Starting to track leader shard {}", params.getFollowShardId(), params.getLeaderShardId());
 
-        BiLongConsumer handler = (followerGCP, maxSeqNo) -> shardFollowNodeTask.start(followerGCP, maxSeqNo, followerGCP, maxSeqNo);
+        FollowerStatsInfoHandler handler = (followerHistoryUUID, followerGCP, maxSeqNo) -> {
+            shardFollowNodeTask.start(followerHistoryUUID, followerGCP, maxSeqNo, followerGCP, maxSeqNo);
+        };
         Consumer<Exception> errorHandler = e -> {
             if (shardFollowNodeTask.isStopped()) {
                 return;
@@ -184,13 +203,13 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollo
             }
         };
 
-        fetchGlobalCheckpoint(followerClient, params.getFollowShardId(), handler, errorHandler);
+        fetchFollowerShardInfo(followerClient, params.getFollowShardId(), handler, errorHandler);
     }
 
-    private void fetchGlobalCheckpoint(
+    private void fetchFollowerShardInfo(
             final Client client,
             final ShardId shardId,
-            final BiLongConsumer handler,
+            final FollowerStatsInfoHandler handler,
             final Consumer<Exception> errorHandler) {
         client.admin().indices().stats(new IndicesStatsRequest().indices(shardId.getIndexName()), ActionListener.wrap(r -> {
             IndexStats indexStats = r.getIndex(shardId.getIndexName());
@@ -204,10 +223,14 @@ public class ShardFollowTasksExecutor extends PersistentTasksExecutor<ShardFollo
                 .filter(shardStats -> shardStats.getShardRouting().primary())
                 .findAny();
             if (filteredShardStats.isPresent()) {
-                final SeqNoStats seqNoStats = filteredShardStats.get().getSeqNoStats();
+                final ShardStats shardStats = filteredShardStats.get();
+                final CommitStats commitStats = shardStats.getCommitStats();
+                final String historyUUID = commitStats.getUserData().get(Engine.HISTORY_UUID_KEY);
+
+                final SeqNoStats seqNoStats = shardStats.getSeqNoStats();
                 final long globalCheckpoint = seqNoStats.getGlobalCheckpoint();
                 final long maxSeqNo = seqNoStats.getMaxSeqNo();
-                handler.accept(globalCheckpoint, maxSeqNo);
+                handler.accept(historyUUID, globalCheckpoint, maxSeqNo);
             } else {
                 errorHandler.accept(new ShardNotFoundException(shardId));
             }
@@ -174,7 +174,7 @@ public final class TransportPutFollowAction
             listener::onFailure);
         // Can't use create index api here, because then index templates can alter the mappings / settings.
         // And index templates could introduce settings / mappings that are incompatible with the leader index.
-        clusterService.submitStateUpdateTask("follow_index_action", new AckedClusterStateUpdateTask<Boolean>(request, handler) {
+        clusterService.submitStateUpdateTask("create_following_index", new AckedClusterStateUpdateTask<Boolean>(request, handler) {
 
             @Override
             protected Boolean newResponse(final boolean acknowledged) {
@@ -192,12 +192,9 @@ public class TransportResumeFollowAction extends HandledTransportAction<ResumeFo
         for (int i = 0; i < numShards; i++) {
             final int shardId = i;
             String taskId = followIndexMetadata.getIndexUUID() + "-" + shardId;
-            Map<String, String> ccrIndexMetadata = followIndexMetadata.getCustomData(Ccr.CCR_CUSTOM_METADATA_KEY);
-            String[] recordedLeaderShardHistoryUUIDs = extractIndexShardHistoryUUIDs(ccrIndexMetadata);
-            String recordedLeaderShardHistoryUUID = recordedLeaderShardHistoryUUIDs[shardId];
 
             final ShardFollowTask shardFollowTask = createShardFollowTask(shardId, clusterNameAlias, request,
-                leaderIndexMetadata, followIndexMetadata, recordedLeaderShardHistoryUUID, filteredHeaders);
+                leaderIndexMetadata, followIndexMetadata, filteredHeaders);
             persistentTasksService.sendStartRequest(taskId, ShardFollowTask.NAME, shardFollowTask,
                 new ActionListener<PersistentTasksCustomMetaData.PersistentTask<ShardFollowTask>>() {
                     @Override
@@ -263,7 +260,7 @@ public class TransportResumeFollowAction extends HandledTransportAction<ResumeFo
                 "] as leader index but instead reference [" + recordedLeaderIndexUUID + "] as leader index");
         }
 
-        String[] recordedHistoryUUIDs = extractIndexShardHistoryUUIDs(ccrIndexMetadata);
+        String[] recordedHistoryUUIDs = extractLeaderShardHistoryUUIDs(ccrIndexMetadata);
         assert recordedHistoryUUIDs.length == leaderIndexHistoryUUID.length;
         for (int i = 0; i < leaderIndexHistoryUUID.length; i++) {
             String recordedLeaderIndexHistoryUUID = recordedHistoryUUIDs[i];
@@ -311,7 +308,6 @@ public class TransportResumeFollowAction extends HandledTransportAction<ResumeFo
             ResumeFollowAction.Request request,
             IndexMetaData leaderIndexMetadata,
             IndexMetaData followIndexMetadata,
-            String recordedLeaderShardHistoryUUID,
             Map<String, String> filteredHeaders
     ) {
         int maxBatchOperationCount;
@@ -363,13 +359,16 @@ public class TransportResumeFollowAction extends HandledTransportAction<ResumeFo
             maxWriteBufferSize,
             maxRetryDelay,
             pollTimeout,
-            recordedLeaderShardHistoryUUID,
             filteredHeaders
         );
     }
 
-    private static String[] extractIndexShardHistoryUUIDs(Map<String, String> ccrIndexMetaData) {
+    static String[] extractLeaderShardHistoryUUIDs(Map<String, String> ccrIndexMetaData) {
         String historyUUIDs = ccrIndexMetaData.get(Ccr.CCR_CUSTOM_METADATA_LEADER_INDEX_SHARD_HISTORY_UUIDS);
+        if (historyUUIDs == null) {
+            throw new IllegalArgumentException("leader index shard UUIDs are missing");
+        }
+
         return historyUUIDs.split(",");
     }
 
@@ -16,19 +16,28 @@ import java.util.List;
 
 public final class BulkShardOperationsRequest extends ReplicatedWriteRequest<BulkShardOperationsRequest> {
 
+    private String historyUUID;
     private List<Translog.Operation> operations;
     private long maxSeqNoOfUpdatesOrDeletes;
 
     public BulkShardOperationsRequest() {
     }
 
-    public BulkShardOperationsRequest(ShardId shardId, List<Translog.Operation> operations, long maxSeqNoOfUpdatesOrDeletes) {
+    public BulkShardOperationsRequest(final ShardId shardId,
+                                      final String historyUUID,
+                                      final List<Translog.Operation> operations,
+                                      long maxSeqNoOfUpdatesOrDeletes) {
         super(shardId);
         setRefreshPolicy(RefreshPolicy.NONE);
+        this.historyUUID = historyUUID;
         this.operations = operations;
         this.maxSeqNoOfUpdatesOrDeletes = maxSeqNoOfUpdatesOrDeletes;
     }
 
+    public String getHistoryUUID() {
+        return historyUUID;
+    }
+
     public List<Translog.Operation> getOperations() {
         return operations;
     }
@@ -40,6 +49,7 @@ public final class BulkShardOperationsRequest extends ReplicatedWriteRequest<Bul
     @Override
     public void readFrom(final StreamInput in) throws IOException {
         super.readFrom(in);
+        historyUUID = in.readString();
         maxSeqNoOfUpdatesOrDeletes = in.readZLong();
         operations = in.readList(Translog.Operation::readOperation);
     }
@@ -47,6 +57,7 @@ public final class BulkShardOperationsRequest extends ReplicatedWriteRequest<Bul
     @Override
     public void writeTo(final StreamOutput out) throws IOException {
         super.writeTo(out);
+        out.writeString(historyUUID);
         out.writeZLong(maxSeqNoOfUpdatesOrDeletes);
         out.writeVInt(operations.size());
         for (Translog.Operation operation : operations) {
@@ -57,7 +68,8 @@ public final class BulkShardOperationsRequest extends ReplicatedWriteRequest<Bul
     @Override
     public String toString() {
         return "BulkShardOperationsRequest{" +
-            "operations=" + operations.size()+
+            "historyUUID=" + historyUUID +
+            ", operations=" + operations.size() +
             ", maxSeqNoUpdates=" + maxSeqNoOfUpdatesOrDeletes +
             ", shardId=" + shardId +
            ", timeout=" + timeout +
@@ -61,17 +61,23 @@ public class TransportBulkShardOperationsAction
     @Override
     protected WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> shardOperationOnPrimary(
             final BulkShardOperationsRequest request, final IndexShard primary) throws Exception {
-        return shardOperationOnPrimary(
-            request.shardId(), request.getOperations(), request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger);
+        return shardOperationOnPrimary(request.shardId(), request.getHistoryUUID(), request.getOperations(),
+            request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger);
     }
 
     // public for testing purposes only
     public static WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> shardOperationOnPrimary(
             final ShardId shardId,
+            final String historyUUID,
             final List<Translog.Operation> sourceOperations,
             final long maxSeqNoOfUpdatesOrDeletes,
             final IndexShard primary,
             final Logger logger) throws IOException {
+        if (historyUUID.equalsIgnoreCase(primary.getHistoryUUID()) == false) {
+            throw new IllegalStateException("unexpected history uuid, expected [" + historyUUID +
+                "], actual [" + primary.getHistoryUUID() + "], shard is likely restored from snapshot or force allocated");
+        }
+
         final List<Translog.Operation> targetOperations = sourceOperations.stream().map(operation -> {
             final Translog.Operation operationWithPrimaryTerm;
             switch (operation.opType()) {
@@ -110,7 +116,7 @@ public class TransportBulkShardOperationsAction
         primary.advanceMaxSeqNoOfUpdatesOrDeletes(maxSeqNoOfUpdatesOrDeletes);
         final Translog.Location location = applyTranslogOperations(targetOperations, primary, Engine.Operation.Origin.PRIMARY);
         final BulkShardOperationsRequest replicaRequest = new BulkShardOperationsRequest(
-            shardId, targetOperations, maxSeqNoOfUpdatesOrDeletes);
+            shardId, historyUUID, targetOperations, maxSeqNoOfUpdatesOrDeletes);
         return new CcrWritePrimaryResult(replicaRequest, location, primary, logger);
     }
 
@@ -51,7 +51,7 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
     }
 
     private void startAndAssertAndStopTask(ShardFollowNodeTask task, TestRun testRun) throws Exception {
-        task.start(testRun.startSeqNo - 1, testRun.startSeqNo - 1, testRun.startSeqNo - 1, testRun.startSeqNo - 1);
+        task.start("uuid", testRun.startSeqNo - 1, testRun.startSeqNo - 1, testRun.startSeqNo - 1, testRun.startSeqNo - 1);
         assertBusy(() -> {
             ShardFollowNodeTaskStatus status = task.getStatus();
             assertThat(status.leaderGlobalCheckpoint(), equalTo(testRun.finalExpectedGlobalCheckpoint));
@@ -85,7 +85,6 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
             10240,
             TimeValue.timeValueMillis(10),
             TimeValue.timeValueMillis(10),
-            "uuid",
             Collections.emptyMap()
         );
 
@@ -111,10 +110,10 @@ public class ShardFollowNodeTaskRandomTests extends ESTestCase {
 
             @Override
             protected void innerSendBulkShardOperationsRequest(
-                    List<Translog.Operation> operations,
+                    String followerHistoryUUID, List<Translog.Operation> operations,
                     long maxSeqNoOfUpdates,
                     Consumer<BulkShardOperationsResponse> handler,
                     Consumer<Exception> errorHandler) {
                 for(Translog.Operation op : operations) {
                     tracker.markSeqNoAsCompleted(op.seqNo());
                 }
@@ -125,7 +125,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
 
         shardChangesRequests.clear();
         // The call the updateMapping is a noop, so noting happens.
-        task.start(128L, 128L, task.getStatus().followerGlobalCheckpoint(), task.getStatus().followerMaxSeqNo());
+        task.start("uuid", 128L, 128L, task.getStatus().followerGlobalCheckpoint(), task.getStatus().followerMaxSeqNo());
         task.markAsCompleted();
         task.coordinateReads();
         assertThat(shardChangesRequests.size(), equalTo(0));
@@ -682,7 +682,6 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
             bufferWriteLimit,
             TimeValue.ZERO,
             TimeValue.ZERO,
-            "uuid",
             Collections.emptyMap()
         );
 
@@ -715,10 +714,10 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
 
             @Override
             protected void innerSendBulkShardOperationsRequest(
-                    final List<Translog.Operation> operations,
+                    String followerHistoryUUID, final List<Translog.Operation> operations,
                     final long maxSeqNoOfUpdates,
                     final Consumer<BulkShardOperationsResponse> handler,
                     final Consumer<Exception> errorHandler) {
                 bulkShardOperationRequests.add(operations);
                 Exception writeFailure = ShardFollowNodeTaskTests.this.writeFailures.poll();
                 if (writeFailure != null) {
@@ -796,7 +795,7 @@ public class ShardFollowNodeTaskTests extends ESTestCase {
 
     void startTask(ShardFollowNodeTask task, long leaderGlobalCheckpoint, long followerGlobalCheckpoint) {
         // The call the updateMapping is a noop, so noting happens.
-        task.start(leaderGlobalCheckpoint, leaderGlobalCheckpoint, followerGlobalCheckpoint, followerGlobalCheckpoint);
+        task.start("uuid", leaderGlobalCheckpoint, leaderGlobalCheckpoint, followerGlobalCheckpoint, followerGlobalCheckpoint);
     }
 
 
@@ -63,6 +63,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
             final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
             final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
             shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
                 leaderSeqNoStats.getGlobalCheckpoint(),
                 leaderSeqNoStats.getMaxSeqNo(),
                 followerSeqNoStats.getGlobalCheckpoint(),
@@ -103,6 +104,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
             final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
             final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
             shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
                 leaderSeqNoStats.getGlobalCheckpoint(),
                 leaderSeqNoStats.getMaxSeqNo(),
                 followerSeqNoStats.getGlobalCheckpoint(),
@@ -137,7 +139,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
             }
         }
 
-    public void testChangeHistoryUUID() throws Exception {
+    public void testChangeLeaderHistoryUUID() throws Exception {
         try (ReplicationGroup leaderGroup = createGroup(0);
              ReplicationGroup followerGroup = createFollowGroup(0)) {
             leaderGroup.startAll();
@@ -148,6 +150,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
             final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
             final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
             shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
                 leaderSeqNoStats.getGlobalCheckpoint(),
                 leaderSeqNoStats.getMaxSeqNo(),
                 followerSeqNoStats.getGlobalCheckpoint(),
@@ -177,6 +180,47 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
         }
     }
 
+    public void testChangeFollowerHistoryUUID() throws Exception {
+        try (ReplicationGroup leaderGroup = createGroup(0);
+             ReplicationGroup followerGroup = createFollowGroup(0)) {
+            leaderGroup.startAll();
+            int docCount = leaderGroup.appendDocs(randomInt(64));
+            leaderGroup.assertAllEqual(docCount);
+            followerGroup.startAll();
+            ShardFollowNodeTask shardFollowTask = createShardFollowTask(leaderGroup, followerGroup);
+            final SeqNoStats leaderSeqNoStats = leaderGroup.getPrimary().seqNoStats();
+            final SeqNoStats followerSeqNoStats = followerGroup.getPrimary().seqNoStats();
+            shardFollowTask.start(
+                followerGroup.getPrimary().getHistoryUUID(),
+                leaderSeqNoStats.getGlobalCheckpoint(),
+                leaderSeqNoStats.getMaxSeqNo(),
+                followerSeqNoStats.getGlobalCheckpoint(),
+                followerSeqNoStats.getMaxSeqNo());
+            leaderGroup.syncGlobalCheckpoint();
+            leaderGroup.assertAllEqual(docCount);
+            Set<String> indexedDocIds = getShardDocUIDs(leaderGroup.getPrimary());
+            assertBusy(() -> {
+                assertThat(followerGroup.getPrimary().getGlobalCheckpoint(), equalTo(leaderGroup.getPrimary().getGlobalCheckpoint()));
+                followerGroup.assertAllEqual(indexedDocIds.size());
+            });
+
+            String oldHistoryUUID = followerGroup.getPrimary().getHistoryUUID();
+            followerGroup.reinitPrimaryShard();
+            followerGroup.getPrimary().store().bootstrapNewHistory();
+            recoverShardFromStore(followerGroup.getPrimary());
+            String newHistoryUUID = followerGroup.getPrimary().getHistoryUUID();
+
+            // force the global checkpoint on the leader to advance
+            leaderGroup.appendDocs(64);
+
+            assertBusy(() -> {
+                assertThat(shardFollowTask.isStopped(), is(true));
+                assertThat(shardFollowTask.getFailure().getMessage(), equalTo("unexpected history uuid, expected [" + oldHistoryUUID +
+                    "], actual [" + newHistoryUUID + "], shard is likely restored from snapshot or force allocated"));
+            });
+        }
+    }
+
     @Override
     protected ReplicationGroup createGroup(int replicas, Settings settings) throws IOException {
         Settings newSettings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@@ -217,9 +261,9 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
             between(1, 4), 10240,
             TimeValue.timeValueMillis(10),
             TimeValue.timeValueMillis(10),
-            leaderGroup.getPrimary().getHistoryUUID(),
             Collections.emptyMap()
         );
+        final String recordedLeaderIndexHistoryUUID = leaderGroup.getPrimary().getHistoryUUID();
 
         BiConsumer<TimeValue, Runnable> scheduler = (delay, task) -> threadPool.schedule(delay, ThreadPool.Names.GENERIC, task);
         AtomicBoolean stopped = new AtomicBoolean(false);
@@ -245,13 +289,14 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
 
             @Override
             protected void innerSendBulkShardOperationsRequest(
-                    final List<Translog.Operation> operations,
-                    final long maxSeqNoOfUpdates,
-                    final Consumer<BulkShardOperationsResponse> handler,
-                    final Consumer<Exception> errorHandler) {
+                    final String followerHistoryUUID,
+                    final List<Translog.Operation> operations,
+                    final long maxSeqNoOfUpdates,
+                    final Consumer<BulkShardOperationsResponse> handler,
+                    final Consumer<Exception> errorHandler) {
                 Runnable task = () -> {
-                    BulkShardOperationsRequest request = new BulkShardOperationsRequest(
-                        params.getFollowShardId(), operations, maxSeqNoOfUpdates);
+                    BulkShardOperationsRequest request = new BulkShardOperationsRequest(params.getFollowShardId(),
+                        followerHistoryUUID, operations, maxSeqNoOfUpdates);
                     ActionListener<BulkShardOperationsResponse> listener = ActionListener.wrap(handler::accept, errorHandler);
                     new CCRAction(request, listener, followerGroup).execute();
                 };
@@ -277,7 +322,7 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
                     return;
                 }
                 Translog.Operation[] ops = ShardChangesAction.getOperations(indexShard, seqNoStats.getGlobalCheckpoint(), from,
-                    maxOperationCount, params.getRecordedLeaderIndexHistoryUUID(), params.getMaxBatchSize());
+                    maxOperationCount, recordedLeaderIndexHistoryUUID, params.getMaxBatchSize());
                 // hard code mapping version; this is ok, as mapping updates are not tested here
                 final ShardChangesAction.Response response = new ShardChangesAction.Response(
                     1L,
@@ -340,8 +385,8 @@ public class ShardFollowTaskReplicationTests extends ESIndexLevelReplicationTest
         @Override
         protected PrimaryResult performOnPrimary(IndexShard primary, BulkShardOperationsRequest request) throws Exception {
             TransportWriteAction.WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> result =
-                TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), request.getOperations(),
-                    request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger);
+                TransportBulkShardOperationsAction.shardOperationOnPrimary(primary.shardId(), request.getHistoryUUID(),
+                    request.getOperations(), request.getMaxSeqNoOfUpdatesOrDeletes(), primary, logger);
             return new PrimaryResult(result.replicaRequest(), result.finalResponseIfSuccessful);
         }
 
@@ -36,7 +36,6 @@ public class ShardFollowTaskTests extends AbstractSerializingTestCase<ShardFollo
             randomIntBetween(1, Integer.MAX_VALUE),
             TimeValue.parseTimeValue(randomTimeValue(), ""),
             TimeValue.parseTimeValue(randomTimeValue(), ""),
-            randomAlphaOfLength(4),
            randomBoolean() ? null : Collections.singletonMap("key", "value")
         );
     }
@@ -83,7 +83,6 @@ public class TransportUnfollowActionTests extends ESTestCase {
             10240,
             TimeValue.timeValueMillis(10),
             TimeValue.timeValueMillis(10),
-            "uuid",
             Collections.emptyMap()
         );
         PersistentTasksCustomMetaData.PersistentTask<?> task =
@@ -59,7 +59,8 @@ public class BulkShardOperationsTests extends IndexShardTestCase {
         }
 
         final TransportWriteAction.WritePrimaryResult<BulkShardOperationsRequest, BulkShardOperationsResponse> result =
-            TransportBulkShardOperationsAction.shardOperationOnPrimary(followerPrimary.shardId(), operations,
+            TransportBulkShardOperationsAction.shardOperationOnPrimary(followerPrimary.shardId(), followerPrimary.getHistoryUUID(),
+                operations,
                 numOps - 1, followerPrimary, logger);
 
         try (Translog.Snapshot snapshot = followerPrimary.getHistoryOperations("test", 0)) {
@@ -222,7 +222,7 @@ public class RollupJobConfig implements NamedWriteable, ToXContentObject {
             builder.endArray();
         }
         if (timeout != null) {
-            builder.field(TIMEOUT, timeout);
+            builder.field(TIMEOUT, timeout.getStringRep());
         }
         builder.field(PAGE_SIZE, pageSize);
     }
@@ -163,6 +163,6 @@ public class RollupJobStatus implements Task.Status, PersistentTaskState {
 
     @Override
     public int hashCode() {
         return Objects.hash(state, currentPosition, upgradedDocumentID);
     }
 }
@@ -477,9 +477,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase {
         try (InputStream is = Files.newInputStream(keyStorePath)) {
             keyStore.load(is, keyStorePass.toCharArray());
         }
-        // TODO Revisit TLS1.2 pinning when TLS1.3 is fully supported
-        // https://github.com/elastic/elasticsearch/issues/32276
-        final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadKeyMaterial(keyStore, keyStorePass.toCharArray())
+        final SSLContext sslContext = new SSLContextBuilder().loadKeyMaterial(keyStore, keyStorePass.toCharArray())
             .build();
         MockWebServer server = new MockWebServer(sslContext, false);
         server.enqueue(new MockResponse().setResponseCode(200).setBody("body"));
@@ -493,9 +491,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase {
         keyStore.load(null, password.toCharArray());
         keyStore.setKeyEntry("testnode_ec", PemUtils.readPrivateKey(keyPath, password::toCharArray), password.toCharArray(),
             CertParsingUtils.readCertificates(Collections.singletonList(certPath)));
-        // TODO Revisit TLS1.2 pinning when TLS1.3 is fully supported
-        // https://github.com/elastic/elasticsearch/issues/32276
-        final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadKeyMaterial(keyStore, password.toCharArray())
+        final SSLContext sslContext = new SSLContextBuilder().loadKeyMaterial(keyStore, password.toCharArray())
             .build();
         MockWebServer server = new MockWebServer(sslContext, false);
         server.enqueue(new MockResponse().setResponseCode(200).setBody("body"));
@@ -510,7 +506,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase {
         try (InputStream is = Files.newInputStream(trustStorePath)) {
             trustStore.load(is, trustStorePass.toCharArray());
         }
-        final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadTrustMaterial(trustStore, null).build();
+        final SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(trustStore, null).build();
         return HttpClients.custom().setSSLContext(sslContext).build();
     }
 
@@ -527,7 +523,7 @@ public class SSLConfigurationReloaderTests extends ESTestCase {
         for (Certificate cert : CertParsingUtils.readCertificates(trustedCertificatePaths)) {
             trustStore.setCertificateEntry(cert.toString(), cert);
         }
-        final SSLContext sslContext = new SSLContextBuilder().useProtocol("TLSv1.2").loadTrustMaterial(trustStore, null).build();
+        final SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(trustStore, null).build();
         return HttpClients.custom().setSSLContext(sslContext).build();
     }
 
@@ -314,7 +314,7 @@ public class MonitoringIT extends ESSingleNodeTestCase {
     private void assertClusterStatsMonitoringDoc(final Map<String, Object> document,
                                                  final boolean apmIndicesExist) {
         final Map<String, Object> source = (Map<String, Object>) document.get("_source");
-        assertEquals(11, source.size());
+        assertEquals(12, source.size());
 
         assertThat((String) source.get("cluster_name"), not(isEmptyOrNullString()));
         assertThat(source.get("version"), equalTo(Version.CURRENT.toString()));
@@ -8,6 +8,7 @@ package org.elasticsearch.xpack.rollup.rest;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.client.node.NodeClient;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.BaseRestHandler;
 import org.elasticsearch.rest.RestController;
@@ -27,7 +28,8 @@ public class RestGetRollupIndexCapsAction extends BaseRestHandler {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) {
         String index = restRequest.param(INDEX.getPreferredName());
         IndicesOptions options = IndicesOptions.fromRequest(restRequest, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED);
-        GetRollupIndexCapsAction.Request request = new GetRollupIndexCapsAction.Request(new String[]{index}, options);
+        GetRollupIndexCapsAction.Request request =
+            new GetRollupIndexCapsAction.Request(Strings.splitStringByCommaToArray(index), options);
         return channel -> client.execute(GetRollupIndexCapsAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }
 
@@ -307,8 +307,8 @@ unquoteIdentifier
     ;
 
 number
-    : (PLUS | MINUS)? DECIMAL_VALUE #decimalLiteral
-    | (PLUS | MINUS)? INTEGER_VALUE #integerLiteral
+    : DECIMAL_VALUE #decimalLiteral
+    | INTEGER_VALUE #integerLiteral
     ;
 
 string
@@ -454,7 +454,7 @@ DIGIT_IDENTIFIER
     ;
 
 TABLE_IDENTIFIER
-    : (LETTER | DIGIT | '_' | '@' | ASTERISK)+
+    : (LETTER | DIGIT | '_')+
     ;
 
 QUOTED_IDENTIFIER
@@ -771,9 +771,9 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                     return uf;
                 }
 
-                String normalizedName = functionRegistry.concreteFunctionName(name);
+                String functionName = functionRegistry.resolveAlias(name);
 
-                List<Function> list = getList(seen, normalizedName);
+                List<Function> list = getList(seen, functionName);
                 // first try to resolve from seen functions
                 if (!list.isEmpty()) {
                     for (Function seenFunction : list) {
@@ -784,11 +784,11 @@ public class Analyzer extends RuleExecutor<LogicalPlan> {
                 }
 
                 // not seen before, use the registry
-                if (!functionRegistry.functionExists(name)) {
-                    return uf.missing(normalizedName, functionRegistry.listFunctions());
+                if (!functionRegistry.functionExists(functionName)) {
+                    return uf.missing(functionName, functionRegistry.listFunctions());
                 }
                 // TODO: look into Generator for significant terms, etc..
-                FunctionDefinition def = functionRegistry.resolveFunction(normalizedName);
+                FunctionDefinition def = functionRegistry.resolveFunction(functionName);
                 Function f = uf.buildResolved(timeZone, def);
 
                 list.add(f);
@@ -90,6 +90,7 @@ import java.util.Collection;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.TimeZone;
 import java.util.function.BiFunction;
@@ -211,21 +212,23 @@ public class FunctionRegistry {
         }
     }
 
-    public FunctionDefinition resolveFunction(String name) {
-        FunctionDefinition def = defs.get(normalize(name));
+    public FunctionDefinition resolveFunction(String functionName) {
+        FunctionDefinition def = defs.get(functionName);
         if (def == null) {
-            throw new SqlIllegalArgumentException("Cannot find function {}; this should have been caught during analysis", name);
+            throw new SqlIllegalArgumentException(
+                "Cannot find function {}; this should have been caught during analysis",
+                functionName);
         }
         return def;
     }
 
-    public String concreteFunctionName(String alias) {
-        String normalized = normalize(alias);
-        return aliases.getOrDefault(normalized, normalized);
+    public String resolveAlias(String alias) {
+        String upperCase = alias.toUpperCase(Locale.ROOT);
+        return aliases.getOrDefault(upperCase, upperCase);
     }
 
-    public boolean functionExists(String name) {
-        return defs.containsKey(normalize(name));
+    public boolean functionExists(String functionName) {
+        return defs.containsKey(functionName);
     }
 
     public Collection<FunctionDefinition> listFunctions() {
@@ -286,6 +286,9 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
             case SqlBaseParser.PLUS:
                 return value;
             case SqlBaseParser.MINUS:
+                if (value instanceof Literal) { // Minus already processed together with literal number
+                    return value;
+                }
                 return new Neg(source(ctx.operator), value);
             default:
                 throw new ParsingException(loc, "Unknown arithemtic {}", ctx.operator.getText());
@@ -483,38 +486,40 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
 
     @Override
     public Literal visitDecimalLiteral(DecimalLiteralContext ctx) {
+        String ctxText = (hasMinusFromParent(ctx) ? "-" : "") + ctx.getText();
         double value;
         try {
-            value = Double.parseDouble(ctx.getText());
+            value = Double.parseDouble(ctxText);
         } catch (NumberFormatException nfe) {
-            throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctx.getText());
+            throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctxText);
         }
         if (Double.isInfinite(value)) {
-            throw new ParsingException(source(ctx), "Number [{}] is too large", ctx.getText());
+            throw new ParsingException(source(ctx), "Number [{}] is too large", ctxText);
         }
         if (Double.isNaN(value)) {
-            throw new ParsingException(source(ctx), "[{}] cannot be parsed as a number (NaN)", ctx.getText());
+            throw new ParsingException(source(ctx), "[{}] cannot be parsed as a number (NaN)", ctxText);
         }
         return new Literal(source(ctx), Double.valueOf(value), DataType.DOUBLE);
     }
 
     @Override
     public Literal visitIntegerLiteral(IntegerLiteralContext ctx) {
+        String ctxText = (hasMinusFromParent(ctx) ? "-" : "") + ctx.getText();
         long value;
         try {
-            value = Long.parseLong(ctx.getText());
+            value = Long.parseLong(ctxText);
         } catch (NumberFormatException nfe) {
             try {
-                BigInteger bi = new BigInteger(ctx.getText());
+                BigInteger bi = new BigInteger(ctxText);
                 try {
                     bi.longValueExact();
                 } catch (ArithmeticException ae) {
-                    throw new ParsingException(source(ctx), "Number [{}] is too large", ctx.getText());
+                    throw new ParsingException(source(ctx), "Number [{}] is too large", ctxText);
                 }
             } catch (NumberFormatException ex) {
                 // parsing fails, go through
             }
-            throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctx.getText());
+            throw new ParsingException(source(ctx), "Cannot parse number [{}]", ctxText);
         }
 
         DataType type = DataType.LONG;
@@ -681,4 +686,21 @@ abstract class ExpressionBuilder extends IdentifierBuilder {
 
         return new Literal(source(ctx), string, DataType.KEYWORD);
     }
+
+    private boolean hasMinusFromParent(SqlBaseParser.NumberContext ctx) {
+        ParserRuleContext parentCtx = ctx.getParent();
+        if (parentCtx != null && parentCtx instanceof SqlBaseParser.NumericLiteralContext) {
+            parentCtx = parentCtx.getParent();
+            if (parentCtx != null && parentCtx instanceof SqlBaseParser.ConstantDefaultContext) {
+                parentCtx = parentCtx.getParent();
+                if (parentCtx != null && parentCtx instanceof SqlBaseParser.ValueExpressionDefaultContext) {
+                    parentCtx = parentCtx.getParent();
+                    if (parentCtx != null && parentCtx instanceof SqlBaseParser.ArithmeticUnaryContext) {
+                        return ((ArithmeticUnaryContext) parentCtx).MINUS() != null;
+                    }
+                }
+            }
+        }
+        return false;
+    }
 }
@@ -142,7 +142,7 @@ class SqlBaseLexer extends Lexer {
   public ATN getATN() { return _ATN; }
 
   public static final String _serializedATN =
-    "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2k\u0370\b\1\4\2\t"+
+    "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2k\u036f\b\1\4\2\t"+
     "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
     "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
     "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
@@ -187,13 +187,13 @@ class SqlBaseLexer extends Lexer {
     "\16a\u02ec\3a\6a\u02f0\na\ra\16a\u02f1\3a\3a\7a\u02f6\na\fa\16a\u02f9"+
     "\13a\5a\u02fb\na\3a\3a\3a\3a\6a\u0301\na\ra\16a\u0302\3a\3a\5a\u0307\n"+
     "a\3b\3b\5b\u030b\nb\3b\3b\3b\7b\u0310\nb\fb\16b\u0313\13b\3c\3c\3c\3c"+
-    "\6c\u0319\nc\rc\16c\u031a\3d\3d\3d\3d\6d\u0321\nd\rd\16d\u0322\3e\3e\3"+
-    "e\3e\7e\u0329\ne\fe\16e\u032c\13e\3e\3e\3f\3f\3f\3f\7f\u0334\nf\ff\16"+
-    "f\u0337\13f\3f\3f\3g\3g\5g\u033d\ng\3g\6g\u0340\ng\rg\16g\u0341\3h\3h"+
-    "\3i\3i\3j\3j\3j\3j\7j\u034c\nj\fj\16j\u034f\13j\3j\5j\u0352\nj\3j\5j\u0355"+
-    "\nj\3j\3j\3k\3k\3k\3k\3k\7k\u035e\nk\fk\16k\u0361\13k\3k\3k\3k\3k\3k\3"+
-    "l\6l\u0369\nl\rl\16l\u036a\3l\3l\3m\3m\3\u035f\2n\3\3\5\4\7\5\t\6\13\7"+
-    "\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25"+
+    "\6c\u0319\nc\rc\16c\u031a\3d\3d\3d\6d\u0320\nd\rd\16d\u0321\3e\3e\3e\3"+
+    "e\7e\u0328\ne\fe\16e\u032b\13e\3e\3e\3f\3f\3f\3f\7f\u0333\nf\ff\16f\u0336"+
+    "\13f\3f\3f\3g\3g\5g\u033c\ng\3g\6g\u033f\ng\rg\16g\u0340\3h\3h\3i\3i\3"+
+    "j\3j\3j\3j\7j\u034b\nj\fj\16j\u034e\13j\3j\5j\u0351\nj\3j\5j\u0354\nj"+
+    "\3j\3j\3k\3k\3k\3k\3k\7k\u035d\nk\fk\16k\u0360\13k\3k\3k\3k\3k\3k\3l\6"+
+    "l\u0368\nl\rl\16l\u0369\3l\3l\3m\3m\3\u035e\2n\3\3\5\4\7\5\t\6\13\7\r"+
+    "\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25"+
     ")\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O"+
     ")Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u<w=y>{?}@\177A\u0081"+
     "B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095"+
@@ -201,7 +201,7 @@ class SqlBaseLexer extends Lexer {
     "V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd"+
     "`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cd\2\u00cf\2\u00d1"+
     "\2\u00d3h\u00d5i\u00d7j\u00d9k\3\2\f\3\2))\4\2BBaa\5\2<<BBaa\3\2$$\3\2"+
-    "bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17\17\"\"\u0392\2\3\3"+
+    "bb\4\2--//\3\2\62;\3\2C\\\4\2\f\f\17\17\5\2\13\f\17\17\"\"\u0390\2\3\3"+
     "\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2"+
     "\17\3\2\2\2\2\21\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3"+
     "\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2\2\2#\3\2\2\2\2"+
@@ -245,10 +245,10 @@ class SqlBaseLexer extends Lexer {
     "\2\2\u00ad\u02bb\3\2\2\2\u00af\u02bd\3\2\2\2\u00b1\u02bf\3\2\2\2\u00b3"+
     "\u02c1\3\2\2\2\u00b5\u02c3\3\2\2\2\u00b7\u02c5\3\2\2\2\u00b9\u02c8\3\2"+
     "\2\2\u00bb\u02ca\3\2\2\2\u00bd\u02cc\3\2\2\2\u00bf\u02d8\3\2\2\2\u00c1"+
-    "\u0306\3\2\2\2\u00c3\u030a\3\2\2\2\u00c5\u0314\3\2\2\2\u00c7\u0320\3\2"+
-    "\2\2\u00c9\u0324\3\2\2\2\u00cb\u032f\3\2\2\2\u00cd\u033a\3\2\2\2\u00cf"+
-    "\u0343\3\2\2\2\u00d1\u0345\3\2\2\2\u00d3\u0347\3\2\2\2\u00d5\u0358\3\2"+
-    "\2\2\u00d7\u0368\3\2\2\2\u00d9\u036e\3\2\2\2\u00db\u00dc\7*\2\2\u00dc"+
+    "\u0306\3\2\2\2\u00c3\u030a\3\2\2\2\u00c5\u0314\3\2\2\2\u00c7\u031f\3\2"+
+    "\2\2\u00c9\u0323\3\2\2\2\u00cb\u032e\3\2\2\2\u00cd\u0339\3\2\2\2\u00cf"+
+    "\u0342\3\2\2\2\u00d1\u0344\3\2\2\2\u00d3\u0346\3\2\2\2\u00d5\u0357\3\2"+
+    "\2\2\u00d7\u0367\3\2\2\2\u00d9\u036d\3\2\2\2\u00db\u00dc\7*\2\2\u00dc"+
     "\4\3\2\2\2\u00dd\u00de\7+\2\2\u00de\6\3\2\2\2\u00df\u00e0\7.\2\2\u00e0"+
     "\b\3\2\2\2\u00e1\u00e2\7<\2\2\u00e2\n\3\2\2\2\u00e3\u00e4\7C\2\2\u00e4"+
     "\u00e5\7N\2\2\u00e5\u00e6\7N\2\2\u00e6\f\3\2\2\2\u00e7\u00e8\7C\2\2\u00e8"+
@@ -408,38 +408,37 @@ class SqlBaseLexer extends Lexer {
     "\2\2\u0313\u0311\3\2\2\2\u0314\u0318\5\u00cfh\2\u0315\u0319\5\u00d1i\2"+
     "\u0316\u0319\5\u00cfh\2\u0317\u0319\t\4\2\2\u0318\u0315\3\2\2\2\u0318"+
     "\u0316\3\2\2\2\u0318\u0317\3\2\2\2\u0319\u031a\3\2\2\2\u031a\u0318\3\2"+
-    "\2\2\u031a\u031b\3\2\2\2\u031b\u00c6\3\2\2\2\u031c\u0321\5\u00d1i\2\u031d"+
-    "\u0321\5\u00cfh\2\u031e\u0321\t\3\2\2\u031f\u0321\5\u00b1Y\2\u0320\u031c"+
-    "\3\2\2\2\u0320\u031d\3\2\2\2\u0320\u031e\3\2\2\2\u0320\u031f\3\2\2\2\u0321"+
-    "\u0322\3\2\2\2\u0322\u0320\3\2\2\2\u0322\u0323\3\2\2\2\u0323\u00c8\3\2"+
-    "\2\2\u0324\u032a\7$\2\2\u0325\u0329\n\5\2\2\u0326\u0327\7$\2\2\u0327\u0329"+
-    "\7$\2\2\u0328\u0325\3\2\2\2\u0328\u0326\3\2\2\2\u0329\u032c\3\2\2\2\u032a"+
-    "\u0328\3\2\2\2\u032a\u032b\3\2\2\2\u032b\u032d\3\2\2\2\u032c\u032a\3\2"+
-    "\2\2\u032d\u032e\7$\2\2\u032e\u00ca\3\2\2\2\u032f\u0335\7b\2\2\u0330\u0334"+
-    "\n\6\2\2\u0331\u0332\7b\2\2\u0332\u0334\7b\2\2\u0333\u0330\3\2\2\2\u0333"+
-    "\u0331\3\2\2\2\u0334\u0337\3\2\2\2\u0335\u0333\3\2\2\2\u0335\u0336\3\2"+
-    "\2\2\u0336\u0338\3\2\2\2\u0337\u0335\3\2\2\2\u0338\u0339\7b\2\2\u0339"+
-    "\u00cc\3\2\2\2\u033a\u033c\7G\2\2\u033b\u033d\t\7\2\2\u033c\u033b\3\2"+
-    "\2\2\u033c\u033d\3\2\2\2\u033d\u033f\3\2\2\2\u033e\u0340\5\u00cfh\2\u033f"+
-    "\u033e\3\2\2\2\u0340\u0341\3\2\2\2\u0341\u033f\3\2\2\2\u0341\u0342\3\2"+
-    "\2\2\u0342\u00ce\3\2\2\2\u0343\u0344\t\b\2\2\u0344\u00d0\3\2\2\2\u0345"+
-    "\u0346\t\t\2\2\u0346\u00d2\3\2\2\2\u0347\u0348\7/\2\2\u0348\u0349\7/\2"+
-    "\2\u0349\u034d\3\2\2\2\u034a\u034c\n\n\2\2\u034b\u034a\3\2\2\2\u034c\u034f"+
-    "\3\2\2\2\u034d\u034b\3\2\2\2\u034d\u034e\3\2\2\2\u034e\u0351\3\2\2\2\u034f"+
-    "\u034d\3\2\2\2\u0350\u0352\7\17\2\2\u0351\u0350\3\2\2\2\u0351\u0352\3"+
-    "\2\2\2\u0352\u0354\3\2\2\2\u0353\u0355\7\f\2\2\u0354\u0353\3\2\2\2\u0354"+
-    "\u0355\3\2\2\2\u0355\u0356\3\2\2\2\u0356\u0357\bj\2\2\u0357\u00d4\3\2"+
-    "\2\2\u0358\u0359\7\61\2\2\u0359\u035a\7,\2\2\u035a\u035f\3\2\2\2\u035b"+
-    "\u035e\5\u00d5k\2\u035c\u035e\13\2\2\2\u035d\u035b\3\2\2\2\u035d\u035c"+
-    "\3\2\2\2\u035e\u0361\3\2\2\2\u035f\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360"+
-    "\u0362\3\2\2\2\u0361\u035f\3\2\2\2\u0362\u0363\7,\2\2\u0363\u0364\7\61"+
-    "\2\2\u0364\u0365\3\2\2\2\u0365\u0366\bk\2\2\u0366\u00d6\3\2\2\2\u0367"+
-    "\u0369\t\13\2\2\u0368\u0367\3\2\2\2\u0369\u036a\3\2\2\2\u036a\u0368\3"+
-    "\2\2\2\u036a\u036b\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036d\bl\2\2\u036d"+
-    "\u00d8\3\2\2\2\u036e\u036f\13\2\2\2\u036f\u00da\3\2\2\2\"\2\u02af\u02d0"+
-    "\u02d2\u02da\u02df\u02e5\u02ec\u02f1\u02f7\u02fa\u0302\u0306\u030a\u030f"+
-    "\u0311\u0318\u031a\u0320\u0322\u0328\u032a\u0333\u0335\u033c\u0341\u034d"+
-    "\u0351\u0354\u035d\u035f\u036a\3\2\3\2";
+    "\2\2\u031a\u031b\3\2\2\2\u031b\u00c6\3\2\2\2\u031c\u0320\5\u00d1i\2\u031d"+
+    "\u0320\5\u00cfh\2\u031e\u0320\7a\2\2\u031f\u031c\3\2\2\2\u031f\u031d\3"+
+    "\2\2\2\u031f\u031e\3\2\2\2\u0320\u0321\3\2\2\2\u0321\u031f\3\2\2\2\u0321"+
+    "\u0322\3\2\2\2\u0322\u00c8\3\2\2\2\u0323\u0329\7$\2\2\u0324\u0328\n\5"+
+    "\2\2\u0325\u0326\7$\2\2\u0326\u0328\7$\2\2\u0327\u0324\3\2\2\2\u0327\u0325"+
+    "\3\2\2\2\u0328\u032b\3\2\2\2\u0329\u0327\3\2\2\2\u0329\u032a\3\2\2\2\u032a"+
+    "\u032c\3\2\2\2\u032b\u0329\3\2\2\2\u032c\u032d\7$\2\2\u032d\u00ca\3\2"+
+    "\2\2\u032e\u0334\7b\2\2\u032f\u0333\n\6\2\2\u0330\u0331\7b\2\2\u0331\u0333"+
+    "\7b\2\2\u0332\u032f\3\2\2\2\u0332\u0330\3\2\2\2\u0333\u0336\3\2\2\2\u0334"+
+    "\u0332\3\2\2\2\u0334\u0335\3\2\2\2\u0335\u0337\3\2\2\2\u0336\u0334\3\2"+
+    "\2\2\u0337\u0338\7b\2\2\u0338\u00cc\3\2\2\2\u0339\u033b\7G\2\2\u033a\u033c"+
+    "\t\7\2\2\u033b\u033a\3\2\2\2\u033b\u033c\3\2\2\2\u033c\u033e\3\2\2\2\u033d"+
+    "\u033f\5\u00cfh\2\u033e\u033d\3\2\2\2\u033f\u0340\3\2\2\2\u0340\u033e"+
+    "\3\2\2\2\u0340\u0341\3\2\2\2\u0341\u00ce\3\2\2\2\u0342\u0343\t\b\2\2\u0343"+
+    "\u00d0\3\2\2\2\u0344\u0345\t\t\2\2\u0345\u00d2\3\2\2\2\u0346\u0347\7/"+
+    "\2\2\u0347\u0348\7/\2\2\u0348\u034c\3\2\2\2\u0349\u034b\n\n\2\2\u034a"+
+    "\u0349\3\2\2\2\u034b\u034e\3\2\2\2\u034c\u034a\3\2\2\2\u034c\u034d\3\2"+
+    "\2\2\u034d\u0350\3\2\2\2\u034e\u034c\3\2\2\2\u034f\u0351\7\17\2\2\u0350"+
+    "\u034f\3\2\2\2\u0350\u0351\3\2\2\2\u0351\u0353\3\2\2\2\u0352\u0354\7\f"+
+    "\2\2\u0353\u0352\3\2\2\2\u0353\u0354\3\2\2\2\u0354\u0355\3\2\2\2\u0355"+
+    "\u0356\bj\2\2\u0356\u00d4\3\2\2\2\u0357\u0358\7\61\2\2\u0358\u0359\7,"+
+    "\2\2\u0359\u035e\3\2\2\2\u035a\u035d\5\u00d5k\2\u035b\u035d\13\2\2\2\u035c"+
+    "\u035a\3\2\2\2\u035c\u035b\3\2\2\2\u035d\u0360\3\2\2\2\u035e\u035f\3\2"+
+    "\2\2\u035e\u035c\3\2\2\2\u035f\u0361\3\2\2\2\u0360\u035e\3\2\2\2\u0361"+
+    "\u0362\7,\2\2\u0362\u0363\7\61\2\2\u0363\u0364\3\2\2\2\u0364\u0365\bk"+
+    "\2\2\u0365\u00d6\3\2\2\2\u0366\u0368\t\13\2\2\u0367\u0366\3\2\2\2\u0368"+
+    "\u0369\3\2\2\2\u0369\u0367\3\2\2\2\u0369\u036a\3\2\2\2\u036a\u036b\3\2"+
+    "\2\2\u036b\u036c\bl\2\2\u036c\u00d8\3\2\2\2\u036d\u036e\13\2\2\2\u036e"+
+    "\u00da\3\2\2\2\"\2\u02af\u02d0\u02d2\u02da\u02df\u02e5\u02ec\u02f1\u02f7"+
+    "\u02fa\u0302\u0306\u030a\u030f\u0311\u0318\u031a\u031f\u0321\u0327\u0329"+
+    "\u0332\u0334\u033b\u0340\u034c\u0350\u0353\u035c\u035e\u0369\3\2\3\2";
 
   public static final ATN _ATN =
     new ATNDeserializer().deserialize(_serializedATN.toCharArray());
   static {
 
@ -3748,9 +3748,54 @@ class SqlBaseParser extends Parser {
|
|||||||
enterOuterAlt(_localctx, 1);
|
enterOuterAlt(_localctx, 1);
|
||||||
{
|
{
|
||||||
setState(539);
|
setState(539);
|
||||||
_errHandler.sync(this);
|
switch (_input.LA(1)) {
|
||||||
switch ( getInterpreter().adaptivePredict(_input,72,_ctx) ) {
|
case T__0:
|
||||||
case 1:
|
case ANALYZE:
|
||||||
|
case ANALYZED:
|
||||||
|
case CAST:
|
||||||
|
case CATALOGS:
|
||||||
|
case COLUMNS:
|
||||||
|
case DEBUG:
|
||||||
|
case EXECUTABLE:
|
||||||
|
case EXPLAIN:
|
||||||
|
case EXTRACT:
|
||||||
|
case FALSE:
|
||||||
|
case FORMAT:
|
||||||
|
case FUNCTIONS:
|
||||||
|
case GRAPHVIZ:
|
||||||
|
case LEFT:
|
||||||
|
case MAPPED:
|
||||||
|
case NULL:
|
||||||
|
case OPTIMIZED:
|
||||||
|
case PARSED:
|
||||||
|
case PHYSICAL:
|
||||||
|
case PLAN:
|
||||||
|
case RIGHT:
|
||||||
|
case RLIKE:
|
||||||
|
case QUERY:
|
||||||
|
case SCHEMAS:
|
||||||
|
case SHOW:
|
||||||
|
case SYS:
|
||||||
|
case TABLES:
|
||||||
|
case TEXT:
|
||||||
|
case TRUE:
|
||||||
|
case TYPE:
|
||||||
|
case TYPES:
|
||||||
|
case VERIFY:
|
||||||
|
case FUNCTION_ESC:
|
||||||
|
case DATE_ESC:
|
||||||
|
case TIME_ESC:
|
||||||
|
case TIMESTAMP_ESC:
|
||||||
|
case GUID_ESC:
|
||||||
|
case ASTERISK:
|
||||||
|
case PARAM:
|
||||||
|
case STRING:
|
||||||
|
case INTEGER_VALUE:
|
||||||
|
case DECIMAL_VALUE:
|
||||||
|
case IDENTIFIER:
|
||||||
|
case DIGIT_IDENTIFIER:
|
||||||
|
case QUOTED_IDENTIFIER:
|
||||||
|
case BACKQUOTED_IDENTIFIER:
|
||||||
{
|
{
|
||||||
_localctx = new ValueExpressionDefaultContext(_localctx);
|
_localctx = new ValueExpressionDefaultContext(_localctx);
|
||||||
_ctx = _localctx;
|
_ctx = _localctx;
|
||||||
@ -3760,7 +3805,8 @@ class SqlBaseParser extends Parser {
|
|||||||
primaryExpression();
|
primaryExpression();
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
case 2:
|
case PLUS:
|
||||||
|
case MINUS:
|
||||||
{
|
{
|
||||||
_localctx = new ArithmeticUnaryContext(_localctx);
|
_localctx = new ArithmeticUnaryContext(_localctx);
|
||||||
_ctx = _localctx;
|
_ctx = _localctx;
|
||||||
@ -3777,6 +3823,8 @@ class SqlBaseParser extends Parser {
|
|||||||
valueExpression(4);
|
valueExpression(4);
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
|
default:
|
||||||
|
throw new NoViableAltException(this);
|
||||||
}
|
}
|
||||||
_ctx.stop = _input.LT(-1);
|
_ctx.stop = _input.LT(-1);
|
||||||
setState(553);
|
setState(553);
|
||||||
@ -4861,8 +4909,6 @@ class SqlBaseParser extends Parser {
|
|||||||
match(NULL);
|
match(NULL);
|
||||||
}
|
}
|
||||||
break;
|
break;
|
||||||
case PLUS:
|
|
||||||
case MINUS:
|
|
||||||
case INTEGER_VALUE:
|
case INTEGER_VALUE:
|
||||||
case DECIMAL_VALUE:
|
case DECIMAL_VALUE:
|
||||||
_localctx = new NumericLiteralContext(_localctx);
|
_localctx = new NumericLiteralContext(_localctx);
|
||||||
@ -5592,8 +5638,6 @@ class SqlBaseParser extends Parser {
|
|||||||
}
|
}
|
||||||
public static class DecimalLiteralContext extends NumberContext {
|
public static class DecimalLiteralContext extends NumberContext {
|
||||||
public TerminalNode DECIMAL_VALUE() { return getToken(SqlBaseParser.DECIMAL_VALUE, 0); }
|
public TerminalNode DECIMAL_VALUE() { return getToken(SqlBaseParser.DECIMAL_VALUE, 0); }
|
||||||
public TerminalNode PLUS() { return getToken(SqlBaseParser.PLUS, 0); }
|
|
||||||
public TerminalNode MINUS() { return getToken(SqlBaseParser.MINUS, 0); }
|
|
||||||
public DecimalLiteralContext(NumberContext ctx) { copyFrom(ctx); }
|
public DecimalLiteralContext(NumberContext ctx) { copyFrom(ctx); }
|
||||||
@Override
|
@Override
|
||||||
public void enterRule(ParseTreeListener listener) {
|
public void enterRule(ParseTreeListener listener) {
|
||||||
@ -5611,8 +5655,6 @@ class SqlBaseParser extends Parser {
|
|||||||
}
|
}
|
||||||
public static class IntegerLiteralContext extends NumberContext {
|
public static class IntegerLiteralContext extends NumberContext {
|
||||||
public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); }
|
public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); }
|
||||||
public TerminalNode PLUS() { return getToken(SqlBaseParser.PLUS, 0); }
|
|
||||||
public TerminalNode MINUS() { return getToken(SqlBaseParser.MINUS, 0); }
|
|
||||||
public IntegerLiteralContext(NumberContext ctx) { copyFrom(ctx); }
|
public IntegerLiteralContext(NumberContext ctx) { copyFrom(ctx); }
|
||||||
@Override
|
@Override
|
||||||
public void enterRule(ParseTreeListener listener) {
|
public void enterRule(ParseTreeListener listener) {
|
||||||
@@ -5632,55 +5674,27 @@ class SqlBaseParser extends Parser {
 public final NumberContext number() throws RecognitionException {
 NumberContext _localctx = new NumberContext(_ctx, getState());
 enterRule(_localctx, 94, RULE_number);
-int _la;
 try {
-setState(712);
-_errHandler.sync(this);
-switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) {
-case 1:
+setState(706);
+switch (_input.LA(1)) {
+case DECIMAL_VALUE:
 _localctx = new DecimalLiteralContext(_localctx);
 enterOuterAlt(_localctx, 1);
 {
-setState(705);
-_la = _input.LA(1);
-if (_la==PLUS || _la==MINUS) {
-{
-setState(704);
-_la = _input.LA(1);
-if ( !(_la==PLUS || _la==MINUS) ) {
-_errHandler.recoverInline(this);
-} else {
-consume();
-}
-}
-}
-
-setState(707);
+setState(704);
 match(DECIMAL_VALUE);
 }
 break;
-case 2:
+case INTEGER_VALUE:
 _localctx = new IntegerLiteralContext(_localctx);
 enterOuterAlt(_localctx, 2);
 {
-setState(709);
-_la = _input.LA(1);
-if (_la==PLUS || _la==MINUS) {
-{
-setState(708);
-_la = _input.LA(1);
-if ( !(_la==PLUS || _la==MINUS) ) {
-_errHandler.recoverInline(this);
-} else {
-consume();
-}
-}
-}
-
-setState(711);
+setState(705);
 match(INTEGER_VALUE);
 }
 break;
+default:
+throw new NoViableAltException(this);
 }
 }
 catch (RecognitionException re) {
@@ -5723,7 +5737,7 @@ class SqlBaseParser extends Parser {
 try {
 enterOuterAlt(_localctx, 1);
 {
-setState(714);
+setState(708);
 _la = _input.LA(1);
 if ( !(_la==PARAM || _la==STRING) ) {
 _errHandler.recoverInline(this);
@@ -5795,7 +5809,7 @@ class SqlBaseParser extends Parser {
 try {
 enterOuterAlt(_localctx, 1);
 {
-setState(716);
+setState(710);
 _la = _input.LA(1);
 if ( !(((((_la - 6)) & ~0x3f) == 0 && ((1L << (_la - 6)) & ((1L << (ANALYZE - 6)) | (1L << (ANALYZED - 6)) | (1L << (CATALOGS - 6)) | (1L << (COLUMNS - 6)) | (1L << (DEBUG - 6)) | (1L << (EXECUTABLE - 6)) | (1L << (EXPLAIN - 6)) | (1L << (FORMAT - 6)) | (1L << (FUNCTIONS - 6)) | (1L << (GRAPHVIZ - 6)) | (1L << (MAPPED - 6)) | (1L << (OPTIMIZED - 6)) | (1L << (PARSED - 6)) | (1L << (PHYSICAL - 6)) | (1L << (PLAN - 6)) | (1L << (RLIKE - 6)) | (1L << (QUERY - 6)) | (1L << (SCHEMAS - 6)) | (1L << (SHOW - 6)) | (1L << (SYS - 6)) | (1L << (TABLES - 6)) | (1L << (TEXT - 6)) | (1L << (TYPE - 6)) | (1L << (TYPES - 6)) | (1L << (VERIFY - 6)))) != 0)) ) {
 _errHandler.recoverInline(this);
@@ -5846,7 +5860,7 @@ class SqlBaseParser extends Parser {
 }

 public static final String _serializedATN =
-"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02d1\4\2\t\2\4"+
+"\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02cb\4\2\t\2\4"+
 "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+
 "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
 "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
@@ -5898,237 +5912,234 @@ class SqlBaseParser extends Parser {
 "\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\3(\5(\u0296\n(\3)\3)"+
 "\3*\3*\3+\3+\3,\3,\3,\7,\u02a1\n,\f,\16,\u02a4\13,\3,\3,\3-\3-\5-\u02aa"+
 "\n-\3.\3.\3.\5.\u02af\n.\3.\3.\3.\3.\5.\u02b5\n.\3.\5.\u02b8\n.\3/\3/"+
-"\5/\u02bc\n/\3\60\3\60\3\60\5\60\u02c1\n\60\3\61\5\61\u02c4\n\61\3\61"+
-"\3\61\5\61\u02c8\n\61\3\61\5\61\u02cb\n\61\3\62\3\62\3\63\3\63\3\63\2"+
-"\4.<\64\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:"+
-"<>@BDFHJLNPRTVXZ\\^`bd\2\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\"BB"+
-"\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7\27"+
-"\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33\36"+
-"\36!\",,\62\62\668:<>?ABDEGG\u0328\2f\3\2\2\2\4i\3\2\2\2\6\u00d0\3\2\2"+
-"\2\b\u00db\3\2\2\2\n\u00df\3\2\2\2\f\u00f4\3\2\2\2\16\u00fb\3\2\2\2\20"+
-"\u00fd\3\2\2\2\22\u0101\3\2\2\2\24\u011d\3\2\2\2\26\u0127\3\2\2\2\30\u0131"+
-"\3\2\2\2\32\u0140\3\2\2\2\34\u0142\3\2\2\2\36\u0148\3\2\2\2 \u014a\3\2"+
-"\2\2\"\u0151\3\2\2\2$\u0163\3\2\2\2&\u0174\3\2\2\2(\u0184\3\2\2\2*\u019f"+
-"\3\2\2\2,\u01a1\3\2\2\2.\u01c2\3\2\2\2\60\u01d3\3\2\2\2\62\u01d6\3\2\2"+
-"\2\64\u0208\3\2\2\2\66\u020a\3\2\2\28\u020d\3\2\2\2:\u0217\3\2\2\2<\u021d"+
-"\3\2\2\2>\u0241\3\2\2\2@\u0248\3\2\2\2B\u024a\3\2\2\2D\u0256\3\2\2\2F"+
-"\u0258\3\2\2\2H\u0264\3\2\2\2J\u0266\3\2\2\2L\u027a\3\2\2\2N\u0295\3\2"+
-"\2\2P\u0297\3\2\2\2R\u0299\3\2\2\2T\u029b\3\2\2\2V\u02a2\3\2\2\2X\u02a9"+
-"\3\2\2\2Z\u02b7\3\2\2\2\\\u02bb\3\2\2\2^\u02c0\3\2\2\2`\u02ca\3\2\2\2"+
-"b\u02cc\3\2\2\2d\u02ce\3\2\2\2fg\5\6\4\2gh\7\2\2\3h\3\3\2\2\2ij\5,\27"+
-"\2jk\7\2\2\3k\5\3\2\2\2l\u00d1\5\b\5\2m{\7\33\2\2nw\7\3\2\2op\78\2\2p"+
-"v\t\2\2\2qr\7\36\2\2rv\t\3\2\2st\7G\2\2tv\5R*\2uo\3\2\2\2uq\3\2\2\2us"+
-"\3\2\2\2vy\3\2\2\2wu\3\2\2\2wx\3\2\2\2xz\3\2\2\2yw\3\2\2\2z|\7\4\2\2{"+
-"n\3\2\2\2{|\3\2\2\2|}\3\2\2\2}\u00d1\5\6\4\2~\u008a\7\24\2\2\177\u0086"+
-"\7\3\2\2\u0080\u0081\78\2\2\u0081\u0085\t\4\2\2\u0082\u0083\7\36\2\2\u0083"+
-"\u0085\t\3\2\2\u0084\u0080\3\2\2\2\u0084\u0082\3\2\2\2\u0085\u0088\3\2"+
-"\2\2\u0086\u0084\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0089\3\2\2\2\u0088"+
-"\u0086\3\2\2\2\u0089\u008b\7\4\2\2\u008a\177\3\2\2\2\u008a\u008b\3\2\2"+
-"\2\u008b\u008c\3\2\2\2\u008c\u00d1\5\6\4\2\u008d\u008e\7>\2\2\u008e\u0091"+
-"\7A\2\2\u008f\u0092\5\66\34\2\u0090\u0092\5Z.\2\u0091\u008f\3\2\2\2\u0091"+
-"\u0090\3\2\2\2\u0091\u0092\3\2\2\2\u0092\u00d1\3\2\2\2\u0093\u0094\7>"+
-"\2\2\u0094\u0095\7\23\2\2\u0095\u0098\t\5\2\2\u0096\u0099\5\66\34\2\u0097"+
-"\u0099\5Z.\2\u0098\u0096\3\2\2\2\u0098\u0097\3\2\2\2\u0099\u00d1\3\2\2"+
-"\2\u009a\u009d\t\6\2\2\u009b\u009e\5\66\34\2\u009c\u009e\5Z.\2\u009d\u009b"+
-"\3\2\2\2\u009d\u009c\3\2\2\2\u009e\u00d1\3\2\2\2\u009f\u00a0\7>\2\2\u00a0"+
-"\u00a2\7!\2\2\u00a1\u00a3\5\66\34\2\u00a2\u00a1\3\2\2\2\u00a2\u00a3\3"+
-"\2\2\2\u00a3\u00d1\3\2\2\2\u00a4\u00a5\7>\2\2\u00a5\u00d1\7<\2\2\u00a6"+
-"\u00a7\7?\2\2\u00a7\u00d1\7\22\2\2\u00a8\u00a9\7?\2\2\u00a9\u00ac\7A\2"+
-"\2\u00aa\u00ab\7\21\2\2\u00ab\u00ad\5\66\34\2\u00ac\u00aa\3\2\2\2\u00ac"+
-"\u00ad\3\2\2\2\u00ad\u00b0\3\2\2\2\u00ae\u00b1\5\66\34\2\u00af\u00b1\5"+
-"Z.\2\u00b0\u00ae\3\2\2\2\u00b0\u00af\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1"+
-"\u00bb\3\2\2\2\u00b2\u00b3\7D\2\2\u00b3\u00b8\5b\62\2\u00b4\u00b5\7\5"+
-"\2\2\u00b5\u00b7\5b\62\2\u00b6\u00b4\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8"+
-"\u00b6\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba\u00b8\3\2"+
-"\2\2\u00bb\u00b2\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\u00d1\3\2\2\2\u00bd"+
-"\u00be\7?\2\2\u00be\u00c1\7\23\2\2\u00bf\u00c0\7\21\2\2\u00c0\u00c2\5"+
-"b\62\2\u00c1\u00bf\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c6\3\2\2\2\u00c3"+
-"\u00c4\7@\2\2\u00c4\u00c7\5\66\34\2\u00c5\u00c7\5Z.\2\u00c6\u00c3\3\2"+
-"\2\2\u00c6\u00c5\3\2\2\2\u00c6\u00c7\3\2\2\2\u00c7\u00c9\3\2\2\2\u00c8"+
-"\u00ca\5\66\34\2\u00c9\u00c8\3\2\2\2\u00c9\u00ca\3\2\2\2\u00ca\u00d1\3"+
-"\2\2\2\u00cb\u00cc\7?\2\2\u00cc\u00d1\7E\2\2\u00cd\u00ce\7?\2\2\u00ce"+
-"\u00cf\7@\2\2\u00cf\u00d1\7E\2\2\u00d0l\3\2\2\2\u00d0m\3\2\2\2\u00d0~"+
-"\3\2\2\2\u00d0\u008d\3\2\2\2\u00d0\u0093\3\2\2\2\u00d0\u009a\3\2\2\2\u00d0"+
-"\u009f\3\2\2\2\u00d0\u00a4\3\2\2\2\u00d0\u00a6\3\2\2\2\u00d0\u00a8\3\2"+
-"\2\2\u00d0\u00bd\3\2\2\2\u00d0\u00cb\3\2\2\2\u00d0\u00cd\3\2\2\2\u00d1"+
-"\7\3\2\2\2\u00d2\u00d3\7I\2\2\u00d3\u00d8\5\34\17\2\u00d4\u00d5\7\5\2"+
-"\2\u00d5\u00d7\5\34\17\2\u00d6\u00d4\3\2\2\2\u00d7\u00da\3\2\2\2\u00d8"+
-"\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\u00dc\3\2\2\2\u00da\u00d8\3\2"+
-"\2\2\u00db\u00d2\3\2\2\2\u00db\u00dc\3\2\2\2\u00dc\u00dd\3\2\2\2\u00dd"+
-"\u00de\5\n\6\2\u00de\t\3\2\2\2\u00df\u00ea\5\16\b\2\u00e0\u00e1\7\64\2"+
-"\2\u00e1\u00e2\7\17\2\2\u00e2\u00e7\5\20\t\2\u00e3\u00e4\7\5\2\2\u00e4"+
-"\u00e6\5\20\t\2\u00e5\u00e3\3\2\2\2\u00e6\u00e9\3\2\2\2\u00e7\u00e5\3"+
-"\2\2\2\u00e7\u00e8\3\2\2\2\u00e8\u00eb\3\2\2\2\u00e9\u00e7\3\2\2\2\u00ea"+
-"\u00e0\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb\u00ed\3\2\2\2\u00ec\u00ee\5\f"+
-"\7\2\u00ed\u00ec\3\2\2\2\u00ed\u00ee\3\2\2\2\u00ee\13\3\2\2\2\u00ef\u00f0"+
-"\7+\2\2\u00f0\u00f5\t\7\2\2\u00f1\u00f2\7L\2\2\u00f2\u00f3\t\7\2\2\u00f3"+
-"\u00f5\7Q\2\2\u00f4\u00ef\3\2\2\2\u00f4\u00f1\3\2\2\2\u00f5\r\3\2\2\2"+
-"\u00f6\u00fc\5\22\n\2\u00f7\u00f8\7\3\2\2\u00f8\u00f9\5\n\6\2\u00f9\u00fa"+
-"\7\4\2\2\u00fa\u00fc\3\2\2\2\u00fb\u00f6\3\2\2\2\u00fb\u00f7\3\2\2\2\u00fc"+
-"\17\3\2\2\2\u00fd\u00ff\5,\27\2\u00fe\u0100\t\b\2\2\u00ff\u00fe\3\2\2"+
-"\2\u00ff\u0100\3\2\2\2\u0100\21\3\2\2\2\u0101\u0103\7=\2\2\u0102\u0104"+
-"\5\36\20\2\u0103\u0102\3\2\2\2\u0103\u0104\3\2\2\2\u0104\u0105\3\2\2\2"+
-"\u0105\u010a\5 \21\2\u0106\u0107\7\5\2\2\u0107\u0109\5 \21\2\u0108\u0106"+
-"\3\2\2\2\u0109\u010c\3\2\2\2\u010a\u0108\3\2\2\2\u010a\u010b\3\2\2\2\u010b"+
-"\u010e\3\2\2\2\u010c\u010a\3\2\2\2\u010d\u010f\5\24\13\2\u010e\u010d\3"+
-"\2\2\2\u010e\u010f\3\2\2\2\u010f\u0112\3\2\2\2\u0110\u0111\7H\2\2\u0111"+
-"\u0113\5.\30\2\u0112\u0110\3\2\2\2\u0112\u0113\3\2\2\2\u0113\u0117\3\2"+
-"\2\2\u0114\u0115\7#\2\2\u0115\u0116\7\17\2\2\u0116\u0118\5\26\f\2\u0117"+
-"\u0114\3\2\2\2\u0117\u0118\3\2\2\2\u0118\u011b\3\2\2\2\u0119\u011a\7$"+
-"\2\2\u011a\u011c\5.\30\2\u011b\u0119\3\2\2\2\u011b\u011c\3\2\2\2\u011c"+
-"\23\3\2\2\2\u011d\u011e\7\37\2\2\u011e\u0123\5\"\22\2\u011f\u0120\7\5"+
-"\2\2\u0120\u0122\5\"\22\2\u0121\u011f\3\2\2\2\u0122\u0125\3\2\2\2\u0123"+
-"\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124\25\3\2\2\2\u0125\u0123\3\2\2"+
-"\2\u0126\u0128\5\36\20\2\u0127\u0126\3\2\2\2\u0127\u0128\3\2\2\2\u0128"+
-"\u0129\3\2\2\2\u0129\u012e\5\30\r\2\u012a\u012b\7\5\2\2\u012b\u012d\5"+
-"\30\r\2\u012c\u012a\3\2\2\2\u012d\u0130\3\2\2\2\u012e\u012c\3\2\2\2\u012e"+
-"\u012f\3\2\2\2\u012f\27\3\2\2\2\u0130\u012e\3\2\2\2\u0131\u0132\5\32\16"+
-"\2\u0132\31\3\2\2\2\u0133\u013c\7\3\2\2\u0134\u0139\5,\27\2\u0135\u0136"+
-"\7\5\2\2\u0136\u0138\5,\27\2\u0137\u0135\3\2\2\2\u0138\u013b\3\2\2\2\u0139"+
-"\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u013d\3\2\2\2\u013b\u0139\3\2"+
-"\2\2\u013c\u0134\3\2\2\2\u013c\u013d\3\2\2\2\u013d\u013e\3\2\2\2\u013e"+
-"\u0141\7\4\2\2\u013f\u0141\5,\27\2\u0140\u0133\3\2\2\2\u0140\u013f\3\2"+
-"\2\2\u0141\33\3\2\2\2\u0142\u0143\5X-\2\u0143\u0144\7\f\2\2\u0144\u0145"+
-"\7\3\2\2\u0145\u0146\5\n\6\2\u0146\u0147\7\4\2\2\u0147\35\3\2\2\2\u0148"+
-"\u0149\t\t\2\2\u0149\37\3\2\2\2\u014a\u014f\5,\27\2\u014b\u014d\7\f\2"+
-"\2\u014c\u014b\3\2\2\2\u014c\u014d\3\2\2\2\u014d\u014e\3\2\2\2\u014e\u0150"+
-"\5X-\2\u014f\u014c\3\2\2\2\u014f\u0150\3\2\2\2\u0150!\3\2\2\2\u0151\u0155"+
-"\5*\26\2\u0152\u0154\5$\23\2\u0153\u0152\3\2\2\2\u0154\u0157\3\2\2\2\u0155"+
-"\u0153\3\2\2\2\u0155\u0156\3\2\2\2\u0156#\3\2\2\2\u0157\u0155\3\2\2\2"+
-"\u0158\u0159\5&\24\2\u0159\u015a\7(\2\2\u015a\u015c\5*\26\2\u015b\u015d"+
-"\5(\25\2\u015c\u015b\3\2\2\2\u015c\u015d\3\2\2\2\u015d\u0164\3\2\2\2\u015e"+
-"\u015f\7.\2\2\u015f\u0160\5&\24\2\u0160\u0161\7(\2\2\u0161\u0162\5*\26"+
-"\2\u0162\u0164\3\2\2\2\u0163\u0158\3\2\2\2\u0163\u015e\3\2\2\2\u0164%"+
-"\3\2\2\2\u0165\u0167\7&\2\2\u0166\u0165\3\2\2\2\u0166\u0167\3\2\2\2\u0167"+
-"\u0175\3\2\2\2\u0168\u016a\7)\2\2\u0169\u016b\7\65\2\2\u016a\u0169\3\2"+
-"\2\2\u016a\u016b\3\2\2\2\u016b\u0175\3\2\2\2\u016c\u016e\79\2\2\u016d"+
-"\u016f\7\65\2\2\u016e\u016d\3\2\2\2\u016e\u016f\3\2\2\2\u016f\u0175\3"+
-"\2\2\2\u0170\u0172\7 \2\2\u0171\u0173\7\65\2\2\u0172\u0171\3\2\2\2\u0172"+
-"\u0173\3\2\2\2\u0173\u0175\3\2\2\2\u0174\u0166\3\2\2\2\u0174\u0168\3\2"+
-"\2\2\u0174\u016c\3\2\2\2\u0174\u0170\3\2\2\2\u0175\'\3\2\2\2\u0176\u0177"+
-"\7\61\2\2\u0177\u0185\5.\30\2\u0178\u0179\7F\2\2\u0179\u017a\7\3\2\2\u017a"+
-"\u017f\5X-\2\u017b\u017c\7\5\2\2\u017c\u017e\5X-\2\u017d\u017b\3\2\2\2"+
-"\u017e\u0181\3\2\2\2\u017f\u017d\3\2\2\2\u017f\u0180\3\2\2\2\u0180\u0182"+
-"\3\2\2\2\u0181\u017f\3\2\2\2\u0182\u0183\7\4\2\2\u0183\u0185\3\2\2\2\u0184"+
-"\u0176\3\2\2\2\u0184\u0178\3\2\2\2\u0185)\3\2\2\2\u0186\u018b\5Z.\2\u0187"+
-"\u0189\7\f\2\2\u0188\u0187\3\2\2\2\u0188\u0189\3\2\2\2\u0189\u018a\3\2"+
-"\2\2\u018a\u018c\5V,\2\u018b\u0188\3\2\2\2\u018b\u018c\3\2\2\2\u018c\u01a0"+
-"\3\2\2\2\u018d\u018e\7\3\2\2\u018e\u018f\5\n\6\2\u018f\u0194\7\4\2\2\u0190"+
-"\u0192\7\f\2\2\u0191\u0190\3\2\2\2\u0191\u0192\3\2\2\2\u0192\u0193\3\2"+
-"\2\2\u0193\u0195\5V,\2\u0194\u0191\3\2\2\2\u0194\u0195\3\2\2\2\u0195\u01a0"+
-"\3\2\2\2\u0196\u0197\7\3\2\2\u0197\u0198\5\"\22\2\u0198\u019d\7\4\2\2"+
-"\u0199\u019b\7\f\2\2\u019a\u0199\3\2\2\2\u019a\u019b\3\2\2\2\u019b\u019c"+
-"\3\2\2\2\u019c\u019e\5V,\2\u019d\u019a\3\2\2\2\u019d\u019e\3\2\2\2\u019e"+
-"\u01a0\3\2\2\2\u019f\u0186\3\2\2\2\u019f\u018d\3\2\2\2\u019f\u0196\3\2"+
-"\2\2\u01a0+\3\2\2\2\u01a1\u01a2\5.\30\2\u01a2-\3\2\2\2\u01a3\u01a4\b\30"+
-"\1\2\u01a4\u01a5\7/\2\2\u01a5\u01c3\5.\30\n\u01a6\u01a7\7\32\2\2\u01a7"+
-"\u01a8\7\3\2\2\u01a8\u01a9\5\b\5\2\u01a9\u01aa\7\4\2\2\u01aa\u01c3\3\2"+
-"\2\2\u01ab\u01ac\7;\2\2\u01ac\u01ad\7\3\2\2\u01ad\u01ae\5b\62\2\u01ae"+
-"\u01af\5\60\31\2\u01af\u01b0\7\4\2\2\u01b0\u01c3\3\2\2\2\u01b1\u01b2\7"+
-"-\2\2\u01b2\u01b3\7\3\2\2\u01b3\u01b4\5V,\2\u01b4\u01b5\7\5\2\2\u01b5"+
-"\u01b6\5b\62\2\u01b6\u01b7\5\60\31\2\u01b7\u01b8\7\4\2\2\u01b8\u01c3\3"+
-"\2\2\2\u01b9\u01ba\7-\2\2\u01ba\u01bb\7\3\2\2\u01bb\u01bc\5b\62\2\u01bc"+
-"\u01bd\7\5\2\2\u01bd\u01be\5b\62\2\u01be\u01bf\5\60\31\2\u01bf\u01c0\7"+
-"\4\2\2\u01c0\u01c3\3\2\2\2\u01c1\u01c3\5\62\32\2\u01c2\u01a3\3\2\2\2\u01c2"+
-"\u01a6\3\2\2\2\u01c2\u01ab\3\2\2\2\u01c2\u01b1\3\2\2\2\u01c2\u01b9\3\2"+
-"\2\2\u01c2\u01c1\3\2\2\2\u01c3\u01cc\3\2\2\2\u01c4\u01c5\f\4\2\2\u01c5"+
-"\u01c6\7\n\2\2\u01c6\u01cb\5.\30\5\u01c7\u01c8\f\3\2\2\u01c8\u01c9\7\63"+
-"\2\2\u01c9\u01cb\5.\30\4\u01ca\u01c4\3\2\2\2\u01ca\u01c7\3\2\2\2\u01cb"+
-"\u01ce\3\2\2\2\u01cc\u01ca\3\2\2\2\u01cc\u01cd\3\2\2\2\u01cd/\3\2\2\2"+
-"\u01ce\u01cc\3\2\2\2\u01cf\u01d0\7\5\2\2\u01d0\u01d2\5b\62\2\u01d1\u01cf"+
-"\3\2\2\2\u01d2\u01d5\3\2\2\2\u01d3\u01d1\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4"+
-"\61\3\2\2\2\u01d5\u01d3\3\2\2\2\u01d6\u01d8\5<\37\2\u01d7\u01d9\5\64\33"+
-"\2\u01d8\u01d7\3\2\2\2\u01d8\u01d9\3\2\2\2\u01d9\63\3\2\2\2\u01da\u01dc"+
-"\7/\2\2\u01db\u01da\3\2\2\2\u01db\u01dc\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd"+
-"\u01de\7\16\2\2\u01de\u01df\5<\37\2\u01df\u01e0\7\n\2\2\u01e0\u01e1\5"+
-"<\37\2\u01e1\u0209\3\2\2\2\u01e2\u01e4\7/\2\2\u01e3\u01e2\3\2\2\2\u01e3"+
-"\u01e4\3\2\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e6\7%\2\2\u01e6\u01e7\7\3"+
-"\2\2\u01e7\u01ec\5,\27\2\u01e8\u01e9\7\5\2\2\u01e9\u01eb\5,\27\2\u01ea"+
-"\u01e8\3\2\2\2\u01eb\u01ee\3\2\2\2\u01ec\u01ea\3\2\2\2\u01ec\u01ed\3\2"+
-"\2\2\u01ed\u01ef\3\2\2\2\u01ee\u01ec\3\2\2\2\u01ef\u01f0\7\4\2\2\u01f0"+
-"\u0209\3\2\2\2\u01f1\u01f3\7/\2\2\u01f2\u01f1\3\2\2\2\u01f2\u01f3\3\2"+
-"\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\7%\2\2\u01f5\u01f6\7\3\2\2\u01f6"+
-"\u01f7\5\b\5\2\u01f7\u01f8\7\4\2\2\u01f8\u0209\3\2\2\2\u01f9\u01fb\7/"+
-"\2\2\u01fa\u01f9\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb\u01fc\3\2\2\2\u01fc"+
-"\u01fd\7*\2\2\u01fd\u0209\58\35\2\u01fe\u0200\7/\2\2\u01ff\u01fe\3\2\2"+
-"\2\u01ff\u0200\3\2\2\2\u0200\u0201\3\2\2\2\u0201\u0202\7:\2\2\u0202\u0209"+
-"\5b\62\2\u0203\u0205\7\'\2\2\u0204\u0206\7/\2\2\u0205\u0204\3\2\2\2\u0205"+
-"\u0206\3\2\2\2\u0206\u0207\3\2\2\2\u0207\u0209\7\60\2\2\u0208\u01db\3"+
-"\2\2\2\u0208\u01e3\3\2\2\2\u0208\u01f2\3\2\2\2\u0208\u01fa\3\2\2\2\u0208"+
-"\u01ff\3\2\2\2\u0208\u0203\3\2\2\2\u0209\65\3\2\2\2\u020a\u020b\7*\2\2"+
-"\u020b\u020c\58\35\2\u020c\67\3\2\2\2\u020d\u020f\5b\62\2\u020e\u0210"+
-"\5:\36\2\u020f\u020e\3\2\2\2\u020f\u0210\3\2\2\2\u02109\3\2\2\2\u0211"+
-"\u0212\7\30\2\2\u0212\u0218\5b\62\2\u0213\u0214\7J\2\2\u0214\u0215\5b"+
-"\62\2\u0215\u0216\7Q\2\2\u0216\u0218\3\2\2\2\u0217\u0211\3\2\2\2\u0217"+
-"\u0213\3\2\2\2\u0218;\3\2\2\2\u0219\u021a\b\37\1\2\u021a\u021e\5> \2\u021b"+
-"\u021c\t\n\2\2\u021c\u021e\5<\37\6\u021d\u0219\3\2\2\2\u021d\u021b\3\2"+
-"\2\2\u021e\u022b\3\2\2\2\u021f\u0220\f\5\2\2\u0220\u0221\t\13\2\2\u0221"+
-"\u022a\5<\37\6\u0222\u0223\f\4\2\2\u0223\u0224\t\n\2\2\u0224\u022a\5<"+
-"\37\5\u0225\u0226\f\3\2\2\u0226\u0227\5P)\2\u0227\u0228\5<\37\4\u0228"+
-"\u022a\3\2\2\2\u0229\u021f\3\2\2\2\u0229\u0222\3\2\2\2\u0229\u0225\3\2"+
-"\2\2\u022a\u022d\3\2\2\2\u022b\u0229\3\2\2\2\u022b\u022c\3\2\2\2\u022c"+
-"=\3\2\2\2\u022d\u022b\3\2\2\2\u022e\u0242\5@!\2\u022f\u0242\5D#\2\u0230"+
-"\u0242\5N(\2\u0231\u0232\5V,\2\u0232\u0233\7^\2\2\u0233\u0235\3\2\2\2"+
-"\u0234\u0231\3\2\2\2\u0234\u0235\3\2\2\2\u0235\u0236\3\2\2\2\u0236\u0242"+
-"\7Z\2\2\u0237\u0242\5H%\2\u0238\u0239\7\3\2\2\u0239\u023a\5\b\5\2\u023a"+
-"\u023b\7\4\2\2\u023b\u0242\3\2\2\2\u023c\u0242\5V,\2\u023d\u023e\7\3\2"+
-"\2\u023e\u023f\5,\27\2\u023f\u0240\7\4\2\2\u0240\u0242\3\2\2\2\u0241\u022e"+
-"\3\2\2\2\u0241\u022f\3\2\2\2\u0241\u0230\3\2\2\2\u0241\u0234\3\2\2\2\u0241"+
-"\u0237\3\2\2\2\u0241\u0238\3\2\2\2\u0241\u023c\3\2\2\2\u0241\u023d\3\2"+
-"\2\2\u0242?\3\2\2\2\u0243\u0249\5B\"\2\u0244\u0245\7K\2\2\u0245\u0246"+
-"\5B\"\2\u0246\u0247\7Q\2\2\u0247\u0249\3\2\2\2\u0248\u0243\3\2\2\2\u0248"+
-"\u0244\3\2\2\2\u0249A\3\2\2\2\u024a\u024b\7\20\2\2\u024b\u024c\7\3\2\2"+
-"\u024c\u024d\5,\27\2\u024d\u024e\7\f\2\2\u024e\u024f\5T+\2\u024f\u0250"+
-"\7\4\2\2\u0250C\3\2\2\2\u0251\u0257\5F$\2\u0252\u0253\7K\2\2\u0253\u0254"+
-"\5F$\2\u0254\u0255\7Q\2\2\u0255\u0257\3\2\2\2\u0256\u0251\3\2\2\2\u0256"+
-"\u0252\3\2\2\2\u0257E\3\2\2\2\u0258\u0259\7\34\2\2\u0259\u025a\7\3\2\2"+
-"\u025a\u025b\5X-\2\u025b\u025c\7\37\2\2\u025c\u025d\5<\37\2\u025d\u025e"+
-"\7\4\2\2\u025eG\3\2\2\2\u025f\u0265\5J&\2\u0260\u0261\7K\2\2\u0261\u0262"+
-"\5J&\2\u0262\u0263\7Q\2\2\u0263\u0265\3\2\2\2\u0264\u025f\3\2\2\2\u0264"+
-"\u0260\3\2\2\2\u0265I\3\2\2\2\u0266\u0267\5L\'\2\u0267\u0273\7\3\2\2\u0268"+
-"\u026a\5\36\20\2\u0269\u0268\3\2\2\2\u0269\u026a\3\2\2\2\u026a\u026b\3"+
-"\2\2\2\u026b\u0270\5,\27\2\u026c\u026d\7\5\2\2\u026d\u026f\5,\27\2\u026e"+
-"\u026c\3\2\2\2\u026f\u0272\3\2\2\2\u0270\u026e\3\2\2\2\u0270\u0271\3\2"+
-"\2\2\u0271\u0274\3\2\2\2\u0272\u0270\3\2\2\2\u0273\u0269\3\2\2\2\u0273"+
-"\u0274\3\2\2\2\u0274\u0275\3\2\2\2\u0275\u0276\7\4\2\2\u0276K\3\2\2\2"+
-"\u0277\u027b\7)\2\2\u0278\u027b\79\2\2\u0279\u027b\5X-\2\u027a\u0277\3"+
-"\2\2\2\u027a\u0278\3\2\2\2\u027a\u0279\3\2\2\2\u027bM\3\2\2\2\u027c\u0296"+
-"\7\60\2\2\u027d\u0296\5`\61\2\u027e\u0296\5R*\2\u027f\u0281\7`\2\2\u0280"+
-"\u027f\3\2\2\2\u0281\u0282\3\2\2\2\u0282\u0280\3\2\2\2\u0282\u0283\3\2"+
-"\2\2\u0283\u0296\3\2\2\2\u0284\u0296\7_\2\2\u0285\u0286\7M\2\2\u0286\u0287"+
-"\5b\62\2\u0287\u0288\7Q\2\2\u0288\u0296\3\2\2\2\u0289\u028a\7N\2\2\u028a"+
-"\u028b\5b\62\2\u028b\u028c\7Q\2\2\u028c\u0296\3\2\2\2\u028d\u028e\7O\2"+
-"\2\u028e\u028f\5b\62\2\u028f\u0290\7Q\2\2\u0290\u0296\3\2\2\2\u0291\u0292"+
-"\7P\2\2\u0292\u0293\5b\62\2\u0293\u0294\7Q\2\2\u0294\u0296\3\2\2\2\u0295"+
-"\u027c\3\2\2\2\u0295\u027d\3\2\2\2\u0295\u027e\3\2\2\2\u0295\u0280\3\2"+
-"\2\2\u0295\u0284\3\2\2\2\u0295\u0285\3\2\2\2\u0295\u0289\3\2\2\2\u0295"+
-"\u028d\3\2\2\2\u0295\u0291\3\2\2\2\u0296O\3\2\2\2\u0297\u0298\t\f\2\2"+
-"\u0298Q\3\2\2\2\u0299\u029a\t\r\2\2\u029aS\3\2\2\2\u029b\u029c\5X-\2\u029c"+
-"U\3\2\2\2\u029d\u029e\5X-\2\u029e\u029f\7^\2\2\u029f\u02a1\3\2\2\2\u02a0"+
-"\u029d\3\2\2\2\u02a1\u02a4\3\2\2\2\u02a2\u02a0\3\2\2\2\u02a2\u02a3\3\2"+
-"\2\2\u02a3\u02a5\3\2\2\2\u02a4\u02a2\3\2\2\2\u02a5\u02a6\5X-\2\u02a6W"+
-"\3\2\2\2\u02a7\u02aa\5\\/\2\u02a8\u02aa\5^\60\2\u02a9\u02a7\3\2\2\2\u02a9"+
-"\u02a8\3\2\2\2\u02aaY\3\2\2\2\u02ab\u02ac\5X-\2\u02ac\u02ad\7\6\2\2\u02ad"+
-"\u02af\3\2\2\2\u02ae\u02ab\3\2\2\2\u02ae\u02af\3\2\2\2\u02af\u02b0\3\2"+
-"\2\2\u02b0\u02b8\7e\2\2\u02b1\u02b2\5X-\2\u02b2\u02b3\7\6\2\2\u02b3\u02b5"+
-"\3\2\2\2\u02b4\u02b1\3\2\2\2\u02b4\u02b5\3\2\2\2\u02b5\u02b6\3\2\2\2\u02b6"+
-"\u02b8\5X-\2\u02b7\u02ae\3\2\2\2\u02b7\u02b4\3\2\2\2\u02b8[\3\2\2\2\u02b9"+
-"\u02bc\7f\2\2\u02ba\u02bc\7g\2\2\u02bb\u02b9\3\2\2\2\u02bb\u02ba\3\2\2"+
-"\2\u02bc]\3\2\2\2\u02bd\u02c1\7c\2\2\u02be\u02c1\5d\63\2\u02bf\u02c1\7"+
-"d\2\2\u02c0\u02bd\3\2\2\2\u02c0\u02be\3\2\2\2\u02c0\u02bf\3\2\2\2\u02c1"+
-"_\3\2\2\2\u02c2\u02c4\t\n\2\2\u02c3\u02c2\3\2\2\2\u02c3\u02c4\3\2\2\2"+
-"\u02c4\u02c5\3\2\2\2\u02c5\u02cb\7b\2\2\u02c6\u02c8\t\n\2\2\u02c7\u02c6"+
-"\3\2\2\2\u02c7\u02c8\3\2\2\2\u02c8\u02c9\3\2\2\2\u02c9\u02cb\7a\2\2\u02ca"+
-"\u02c3\3\2\2\2\u02ca\u02c7\3\2\2\2\u02cba\3\2\2\2\u02cc\u02cd\t\16\2\2"+
-"\u02cdc\3\2\2\2\u02ce\u02cf\t\17\2\2\u02cfe\3\2\2\2buw{\u0084\u0086\u008a"+
-"\u0091\u0098\u009d\u00a2\u00ac\u00b0\u00b8\u00bb\u00c1\u00c6\u00c9\u00d0"+
-"\u00d8\u00db\u00e7\u00ea\u00ed\u00f4\u00fb\u00ff\u0103\u010a\u010e\u0112"+
-"\u0117\u011b\u0123\u0127\u012e\u0139\u013c\u0140\u014c\u014f\u0155\u015c"+
-"\u0163\u0166\u016a\u016e\u0172\u0174\u017f\u0184\u0188\u018b\u0191\u0194"+
-"\u019a\u019d\u019f\u01c2\u01ca\u01cc\u01d3\u01d8\u01db\u01e3\u01ec\u01f2"+
-"\u01fa\u01ff\u0205\u0208\u020f\u0217\u021d\u0229\u022b\u0234\u0241\u0248"+
-"\u0256\u0264\u0269\u0270\u0273\u027a\u0282\u0295\u02a2\u02a9\u02ae\u02b4"+
-"\u02b7\u02bb\u02c0\u02c3\u02c7\u02ca";
+"\5/\u02bc\n/\3\60\3\60\3\60\5\60\u02c1\n\60\3\61\3\61\5\61\u02c5\n\61"+
+"\3\62\3\62\3\63\3\63\3\63\2\4.<\64\2\4\6\b\n\f\16\20\22\24\26\30\32\34"+
+"\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`bd\2\20\b\2\7\7\t\t\31\31"+
+",,\62\62\66\66\4\2\"\"BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7a"+
+"a\4\2\r\r\25\25\4\2\7\7\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20"+
+"\2\b\t\22\24\31\31\33\33\36\36!\",,\62\62\668:<>?ABDEGG\u0320\2f\3\2\2"+
+"\2\4i\3\2\2\2\6\u00d0\3\2\2\2\b\u00db\3\2\2\2\n\u00df\3\2\2\2\f\u00f4"+
+"\3\2\2\2\16\u00fb\3\2\2\2\20\u00fd\3\2\2\2\22\u0101\3\2\2\2\24\u011d\3"+
+"\2\2\2\26\u0127\3\2\2\2\30\u0131\3\2\2\2\32\u0140\3\2\2\2\34\u0142\3\2"+
+"\2\2\36\u0148\3\2\2\2 \u014a\3\2\2\2\"\u0151\3\2\2\2$\u0163\3\2\2\2&\u0174"+
+"\3\2\2\2(\u0184\3\2\2\2*\u019f\3\2\2\2,\u01a1\3\2\2\2.\u01c2\3\2\2\2\60"+
+"\u01d3\3\2\2\2\62\u01d6\3\2\2\2\64\u0208\3\2\2\2\66\u020a\3\2\2\28\u020d"+
+"\3\2\2\2:\u0217\3\2\2\2<\u021d\3\2\2\2>\u0241\3\2\2\2@\u0248\3\2\2\2B"+
+"\u024a\3\2\2\2D\u0256\3\2\2\2F\u0258\3\2\2\2H\u0264\3\2\2\2J\u0266\3\2"+
+"\2\2L\u027a\3\2\2\2N\u0295\3\2\2\2P\u0297\3\2\2\2R\u0299\3\2\2\2T\u029b"+
+"\3\2\2\2V\u02a2\3\2\2\2X\u02a9\3\2\2\2Z\u02b7\3\2\2\2\\\u02bb\3\2\2\2"+
+"^\u02c0\3\2\2\2`\u02c4\3\2\2\2b\u02c6\3\2\2\2d\u02c8\3\2\2\2fg\5\6\4\2"+
+"gh\7\2\2\3h\3\3\2\2\2ij\5,\27\2jk\7\2\2\3k\5\3\2\2\2l\u00d1\5\b\5\2m{"+
+"\7\33\2\2nw\7\3\2\2op\78\2\2pv\t\2\2\2qr\7\36\2\2rv\t\3\2\2st\7G\2\2t"+
+"v\5R*\2uo\3\2\2\2uq\3\2\2\2us\3\2\2\2vy\3\2\2\2wu\3\2\2\2wx\3\2\2\2xz"+
+"\3\2\2\2yw\3\2\2\2z|\7\4\2\2{n\3\2\2\2{|\3\2\2\2|}\3\2\2\2}\u00d1\5\6"+
+"\4\2~\u008a\7\24\2\2\177\u0086\7\3\2\2\u0080\u0081\78\2\2\u0081\u0085"+
+"\t\4\2\2\u0082\u0083\7\36\2\2\u0083\u0085\t\3\2\2\u0084\u0080\3\2\2\2"+
+"\u0084\u0082\3\2\2\2\u0085\u0088\3\2\2\2\u0086\u0084\3\2\2\2\u0086\u0087"+
+"\3\2\2\2\u0087\u0089\3\2\2\2\u0088\u0086\3\2\2\2\u0089\u008b\7\4\2\2\u008a"+
+"\177\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c\u00d1\5\6\4"+
+"\2\u008d\u008e\7>\2\2\u008e\u0091\7A\2\2\u008f\u0092\5\66\34\2\u0090\u0092"+
+"\5Z.\2\u0091\u008f\3\2\2\2\u0091\u0090\3\2\2\2\u0091\u0092\3\2\2\2\u0092"+
+"\u00d1\3\2\2\2\u0093\u0094\7>\2\2\u0094\u0095\7\23\2\2\u0095\u0098\t\5"+
+"\2\2\u0096\u0099\5\66\34\2\u0097\u0099\5Z.\2\u0098\u0096\3\2\2\2\u0098"+
+"\u0097\3\2\2\2\u0099\u00d1\3\2\2\2\u009a\u009d\t\6\2\2\u009b\u009e\5\66"+
+"\34\2\u009c\u009e\5Z.\2\u009d\u009b\3\2\2\2\u009d\u009c\3\2\2\2\u009e"+
+"\u00d1\3\2\2\2\u009f\u00a0\7>\2\2\u00a0\u00a2\7!\2\2\u00a1\u00a3\5\66"+
+"\34\2\u00a2\u00a1\3\2\2\2\u00a2\u00a3\3\2\2\2\u00a3\u00d1\3\2\2\2\u00a4"+
+"\u00a5\7>\2\2\u00a5\u00d1\7<\2\2\u00a6\u00a7\7?\2\2\u00a7\u00d1\7\22\2"+
+"\2\u00a8\u00a9\7?\2\2\u00a9\u00ac\7A\2\2\u00aa\u00ab\7\21\2\2\u00ab\u00ad"+
+"\5\66\34\2\u00ac\u00aa\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad\u00b0\3\2\2\2"+
+"\u00ae\u00b1\5\66\34\2\u00af\u00b1\5Z.\2\u00b0\u00ae\3\2\2\2\u00b0\u00af"+
+"\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00bb\3\2\2\2\u00b2\u00b3\7D\2\2\u00b3"+
+"\u00b8\5b\62\2\u00b4\u00b5\7\5\2\2\u00b5\u00b7\5b\62\2\u00b6\u00b4\3\2"+
+"\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b8\u00b9\3\2\2\2\u00b9"+
+"\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00bb\u00b2\3\2\2\2\u00bb\u00bc\3\2"+
+"\2\2\u00bc\u00d1\3\2\2\2\u00bd\u00be\7?\2\2\u00be\u00c1\7\23\2\2\u00bf"+
+"\u00c0\7\21\2\2\u00c0\u00c2\5b\62\2\u00c1\u00bf\3\2\2\2\u00c1\u00c2\3"+
+"\2\2\2\u00c2\u00c6\3\2\2\2\u00c3\u00c4\7@\2\2\u00c4\u00c7\5\66\34\2\u00c5"+
+"\u00c7\5Z.\2\u00c6\u00c3\3\2\2\2\u00c6\u00c5\3\2\2\2\u00c6\u00c7\3\2\2"+
+"\2\u00c7\u00c9\3\2\2\2\u00c8\u00ca\5\66\34\2\u00c9\u00c8\3\2\2\2\u00c9"+
+"\u00ca\3\2\2\2\u00ca\u00d1\3\2\2\2\u00cb\u00cc\7?\2\2\u00cc\u00d1\7E\2"+
+"\2\u00cd\u00ce\7?\2\2\u00ce\u00cf\7@\2\2\u00cf\u00d1\7E\2\2\u00d0l\3\2"+
+"\2\2\u00d0m\3\2\2\2\u00d0~\3\2\2\2\u00d0\u008d\3\2\2\2\u00d0\u0093\3\2"+
+"\2\2\u00d0\u009a\3\2\2\2\u00d0\u009f\3\2\2\2\u00d0\u00a4\3\2\2\2\u00d0"+
+"\u00a6\3\2\2\2\u00d0\u00a8\3\2\2\2\u00d0\u00bd\3\2\2\2\u00d0\u00cb\3\2"+
+"\2\2\u00d0\u00cd\3\2\2\2\u00d1\7\3\2\2\2\u00d2\u00d3\7I\2\2\u00d3\u00d8"+
+"\5\34\17\2\u00d4\u00d5\7\5\2\2\u00d5\u00d7\5\34\17\2\u00d6\u00d4\3\2\2"+
+"\2\u00d7\u00da\3\2\2\2\u00d8\u00d6\3\2\2\2\u00d8\u00d9\3\2\2\2\u00d9\u00dc"+
+"\3\2\2\2\u00da\u00d8\3\2\2\2\u00db\u00d2\3\2\2\2\u00db\u00dc\3\2\2\2\u00dc"+
+"\u00dd\3\2\2\2\u00dd\u00de\5\n\6\2\u00de\t\3\2\2\2\u00df\u00ea\5\16\b"+
+"\2\u00e0\u00e1\7\64\2\2\u00e1\u00e2\7\17\2\2\u00e2\u00e7\5\20\t\2\u00e3"+
+"\u00e4\7\5\2\2\u00e4\u00e6\5\20\t\2\u00e5\u00e3\3\2\2\2\u00e6\u00e9\3"+
+"\2\2\2\u00e7\u00e5\3\2\2\2\u00e7\u00e8\3\2\2\2\u00e8\u00eb\3\2\2\2\u00e9"+
+"\u00e7\3\2\2\2\u00ea\u00e0\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb\u00ed\3\2"+
+"\2\2\u00ec\u00ee\5\f\7\2\u00ed\u00ec\3\2\2\2\u00ed\u00ee\3\2\2\2\u00ee"+
+"\13\3\2\2\2\u00ef\u00f0\7+\2\2\u00f0\u00f5\t\7\2\2\u00f1\u00f2\7L\2\2"+
+"\u00f2\u00f3\t\7\2\2\u00f3\u00f5\7Q\2\2\u00f4\u00ef\3\2\2\2\u00f4\u00f1"+
+"\3\2\2\2\u00f5\r\3\2\2\2\u00f6\u00fc\5\22\n\2\u00f7\u00f8\7\3\2\2\u00f8"+
+"\u00f9\5\n\6\2\u00f9\u00fa\7\4\2\2\u00fa\u00fc\3\2\2\2\u00fb\u00f6\3\2"+
+"\2\2\u00fb\u00f7\3\2\2\2\u00fc\17\3\2\2\2\u00fd\u00ff\5,\27\2\u00fe\u0100"+
+"\t\b\2\2\u00ff\u00fe\3\2\2\2\u00ff\u0100\3\2\2\2\u0100\21\3\2\2\2\u0101"+
+"\u0103\7=\2\2\u0102\u0104\5\36\20\2\u0103\u0102\3\2\2\2\u0103\u0104\3"+
+"\2\2\2\u0104\u0105\3\2\2\2\u0105\u010a\5 \21\2\u0106\u0107\7\5\2\2\u0107"+
+"\u0109\5 \21\2\u0108\u0106\3\2\2\2\u0109\u010c\3\2\2\2\u010a\u0108\3\2"+
+"\2\2\u010a\u010b\3\2\2\2\u010b\u010e\3\2\2\2\u010c\u010a\3\2\2\2\u010d"+
+"\u010f\5\24\13\2\u010e\u010d\3\2\2\2\u010e\u010f\3\2\2\2\u010f\u0112\3"+
+"\2\2\2\u0110\u0111\7H\2\2\u0111\u0113\5.\30\2\u0112\u0110\3\2\2\2\u0112"+
+"\u0113\3\2\2\2\u0113\u0117\3\2\2\2\u0114\u0115\7#\2\2\u0115\u0116\7\17"+
+"\2\2\u0116\u0118\5\26\f\2\u0117\u0114\3\2\2\2\u0117\u0118\3\2\2\2\u0118"+
+"\u011b\3\2\2\2\u0119\u011a\7$\2\2\u011a\u011c\5.\30\2\u011b\u0119\3\2"+
+"\2\2\u011b\u011c\3\2\2\2\u011c\23\3\2\2\2\u011d\u011e\7\37\2\2\u011e\u0123"+
+"\5\"\22\2\u011f\u0120\7\5\2\2\u0120\u0122\5\"\22\2\u0121\u011f\3\2\2\2"+
+"\u0122\u0125\3\2\2\2\u0123\u0121\3\2\2\2\u0123\u0124\3\2\2\2\u0124\25"+
+"\3\2\2\2\u0125\u0123\3\2\2\2\u0126\u0128\5\36\20\2\u0127\u0126\3\2\2\2"+
+"\u0127\u0128\3\2\2\2\u0128\u0129\3\2\2\2\u0129\u012e\5\30\r\2\u012a\u012b"+
+"\7\5\2\2\u012b\u012d\5\30\r\2\u012c\u012a\3\2\2\2\u012d\u0130\3\2\2\2"+
+"\u012e\u012c\3\2\2\2\u012e\u012f\3\2\2\2\u012f\27\3\2\2\2\u0130\u012e"+
+"\3\2\2\2\u0131\u0132\5\32\16\2\u0132\31\3\2\2\2\u0133\u013c\7\3\2\2\u0134"+
+"\u0139\5,\27\2\u0135\u0136\7\5\2\2\u0136\u0138\5,\27\2\u0137\u0135\3\2"+
+"\2\2\u0138\u013b\3\2\2\2\u0139\u0137\3\2\2\2\u0139\u013a\3\2\2\2\u013a"+
+"\u013d\3\2\2\2\u013b\u0139\3\2\2\2\u013c\u0134\3\2\2\2\u013c\u013d\3\2"+
+"\2\2\u013d\u013e\3\2\2\2\u013e\u0141\7\4\2\2\u013f\u0141\5,\27\2\u0140"+
+"\u0133\3\2\2\2\u0140\u013f\3\2\2\2\u0141\33\3\2\2\2\u0142\u0143\5X-\2"+
+"\u0143\u0144\7\f\2\2\u0144\u0145\7\3\2\2\u0145\u0146\5\n\6\2\u0146\u0147"+
+"\7\4\2\2\u0147\35\3\2\2\2\u0148\u0149\t\t\2\2\u0149\37\3\2\2\2\u014a\u014f"+
+"\5,\27\2\u014b\u014d\7\f\2\2\u014c\u014b\3\2\2\2\u014c\u014d\3\2\2\2\u014d"+
+"\u014e\3\2\2\2\u014e\u0150\5X-\2\u014f\u014c\3\2\2\2\u014f\u0150\3\2\2"+
+"\2\u0150!\3\2\2\2\u0151\u0155\5*\26\2\u0152\u0154\5$\23\2\u0153\u0152"+
+"\3\2\2\2\u0154\u0157\3\2\2\2\u0155\u0153\3\2\2\2\u0155\u0156\3\2\2\2\u0156"+
+"#\3\2\2\2\u0157\u0155\3\2\2\2\u0158\u0159\5&\24\2\u0159\u015a\7(\2\2\u015a"+
+"\u015c\5*\26\2\u015b\u015d\5(\25\2\u015c\u015b\3\2\2\2\u015c\u015d\3\2"+
+"\2\2\u015d\u0164\3\2\2\2\u015e\u015f\7.\2\2\u015f\u0160\5&\24\2\u0160"+
+"\u0161\7(\2\2\u0161\u0162\5*\26\2\u0162\u0164\3\2\2\2\u0163\u0158\3\2"+
+"\2\2\u0163\u015e\3\2\2\2\u0164%\3\2\2\2\u0165\u0167\7&\2\2\u0166\u0165"+
+"\3\2\2\2\u0166\u0167\3\2\2\2\u0167\u0175\3\2\2\2\u0168\u016a\7)\2\2\u0169"+
+"\u016b\7\65\2\2\u016a\u0169\3\2\2\2\u016a\u016b\3\2\2\2\u016b\u0175\3"+
+"\2\2\2\u016c\u016e\79\2\2\u016d\u016f\7\65\2\2\u016e\u016d\3\2\2\2\u016e"+
+"\u016f\3\2\2\2\u016f\u0175\3\2\2\2\u0170\u0172\7 \2\2\u0171\u0173\7\65"+
+"\2\2\u0172\u0171\3\2\2\2\u0172\u0173\3\2\2\2\u0173\u0175\3\2\2\2\u0174"+
+"\u0166\3\2\2\2\u0174\u0168\3\2\2\2\u0174\u016c\3\2\2\2\u0174\u0170\3\2"+
+"\2\2\u0175\'\3\2\2\2\u0176\u0177\7\61\2\2\u0177\u0185\5.\30\2\u0178\u0179"+
+"\7F\2\2\u0179\u017a\7\3\2\2\u017a\u017f\5X-\2\u017b\u017c\7\5\2\2\u017c"+
+"\u017e\5X-\2\u017d\u017b\3\2\2\2\u017e\u0181\3\2\2\2\u017f\u017d\3\2\2"+
+"\2\u017f\u0180\3\2\2\2\u0180\u0182\3\2\2\2\u0181\u017f\3\2\2\2\u0182\u0183"+
+"\7\4\2\2\u0183\u0185\3\2\2\2\u0184\u0176\3\2\2\2\u0184\u0178\3\2\2\2\u0185"+
+")\3\2\2\2\u0186\u018b\5Z.\2\u0187\u0189\7\f\2\2\u0188\u0187\3\2\2\2\u0188"+
+"\u0189\3\2\2\2\u0189\u018a\3\2\2\2\u018a\u018c\5V,\2\u018b\u0188\3\2\2"+
+"\2\u018b\u018c\3\2\2\2\u018c\u01a0\3\2\2\2\u018d\u018e\7\3\2\2\u018e\u018f"+
+"\5\n\6\2\u018f\u0194\7\4\2\2\u0190\u0192\7\f\2\2\u0191\u0190\3\2\2\2\u0191"+
+"\u0192\3\2\2\2\u0192\u0193\3\2\2\2\u0193\u0195\5V,\2\u0194\u0191\3\2\2"+
+"\2\u0194\u0195\3\2\2\2\u0195\u01a0\3\2\2\2\u0196\u0197\7\3\2\2\u0197\u0198"+
+"\5\"\22\2\u0198\u019d\7\4\2\2\u0199\u019b\7\f\2\2\u019a\u0199\3\2\2\2"+
+"\u019a\u019b\3\2\2\2\u019b\u019c\3\2\2\2\u019c\u019e\5V,\2\u019d\u019a"+
+"\3\2\2\2\u019d\u019e\3\2\2\2\u019e\u01a0\3\2\2\2\u019f\u0186\3\2\2\2\u019f"+
+"\u018d\3\2\2\2\u019f\u0196\3\2\2\2\u01a0+\3\2\2\2\u01a1\u01a2\5.\30\2"+
+"\u01a2-\3\2\2\2\u01a3\u01a4\b\30\1\2\u01a4\u01a5\7/\2\2\u01a5\u01c3\5"+
+".\30\n\u01a6\u01a7\7\32\2\2\u01a7\u01a8\7\3\2\2\u01a8\u01a9\5\b\5\2\u01a9"+
+"\u01aa\7\4\2\2\u01aa\u01c3\3\2\2\2\u01ab\u01ac\7;\2\2\u01ac\u01ad\7\3"+
+"\2\2\u01ad\u01ae\5b\62\2\u01ae\u01af\5\60\31\2\u01af\u01b0\7\4\2\2\u01b0"+
+"\u01c3\3\2\2\2\u01b1\u01b2\7-\2\2\u01b2\u01b3\7\3\2\2\u01b3\u01b4\5V,"+
+"\2\u01b4\u01b5\7\5\2\2\u01b5\u01b6\5b\62\2\u01b6\u01b7\5\60\31\2\u01b7"+
+"\u01b8\7\4\2\2\u01b8\u01c3\3\2\2\2\u01b9\u01ba\7-\2\2\u01ba\u01bb\7\3"+
+"\2\2\u01bb\u01bc\5b\62\2\u01bc\u01bd\7\5\2\2\u01bd\u01be\5b\62\2\u01be"+
+"\u01bf\5\60\31\2\u01bf\u01c0\7\4\2\2\u01c0\u01c3\3\2\2\2\u01c1\u01c3\5"+
+"\62\32\2\u01c2\u01a3\3\2\2\2\u01c2\u01a6\3\2\2\2\u01c2\u01ab\3\2\2\2\u01c2"+
+"\u01b1\3\2\2\2\u01c2\u01b9\3\2\2\2\u01c2\u01c1\3\2\2\2\u01c3\u01cc\3\2"+
+"\2\2\u01c4\u01c5\f\4\2\2\u01c5\u01c6\7\n\2\2\u01c6\u01cb\5.\30\5\u01c7"+
+"\u01c8\f\3\2\2\u01c8\u01c9\7\63\2\2\u01c9\u01cb\5.\30\4\u01ca\u01c4\3"+
+"\2\2\2\u01ca\u01c7\3\2\2\2\u01cb\u01ce\3\2\2\2\u01cc\u01ca\3\2\2\2\u01cc"+
+"\u01cd\3\2\2\2\u01cd/\3\2\2\2\u01ce\u01cc\3\2\2\2\u01cf\u01d0\7\5\2\2"+
+"\u01d0\u01d2\5b\62\2\u01d1\u01cf\3\2\2\2\u01d2\u01d5\3\2\2\2\u01d3\u01d1"+
+"\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4\61\3\2\2\2\u01d5\u01d3\3\2\2\2\u01d6"+
+"\u01d8\5<\37\2\u01d7\u01d9\5\64\33\2\u01d8\u01d7\3\2\2\2\u01d8\u01d9\3"+
+"\2\2\2\u01d9\63\3\2\2\2\u01da\u01dc\7/\2\2\u01db\u01da\3\2\2\2\u01db\u01dc"+
+"\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u01de\7\16\2\2\u01de\u01df\5<\37\2"+
+"\u01df\u01e0\7\n\2\2\u01e0\u01e1\5<\37\2\u01e1\u0209\3\2\2\2\u01e2\u01e4"+
+"\7/\2\2\u01e3\u01e2\3\2\2\2\u01e3\u01e4\3\2\2\2\u01e4\u01e5\3\2\2\2\u01e5"+
+"\u01e6\7%\2\2\u01e6\u01e7\7\3\2\2\u01e7\u01ec\5,\27\2\u01e8\u01e9\7\5"+
+"\2\2\u01e9\u01eb\5,\27\2\u01ea\u01e8\3\2\2\2\u01eb\u01ee\3\2\2\2\u01ec"+
+"\u01ea\3\2\2\2\u01ec\u01ed\3\2\2\2\u01ed\u01ef\3\2\2\2\u01ee\u01ec\3\2"+
+"\2\2\u01ef\u01f0\7\4\2\2\u01f0\u0209\3\2\2\2\u01f1\u01f3\7/\2\2\u01f2"+
+"\u01f1\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\7%"+
+"\2\2\u01f5\u01f6\7\3\2\2\u01f6\u01f7\5\b\5\2\u01f7\u01f8\7\4\2\2\u01f8"+
+"\u0209\3\2\2\2\u01f9\u01fb\7/\2\2\u01fa\u01f9\3\2\2\2\u01fa\u01fb\3\2"+
+"\2\2\u01fb\u01fc\3\2\2\2\u01fc\u01fd\7*\2\2\u01fd\u0209\58\35\2\u01fe"+
+"\u0200\7/\2\2\u01ff\u01fe\3\2\2\2\u01ff\u0200\3\2\2\2\u0200\u0201\3\2"+
+"\2\2\u0201\u0202\7:\2\2\u0202\u0209\5b\62\2\u0203\u0205\7\'\2\2\u0204"+
+"\u0206\7/\2\2\u0205\u0204\3\2\2\2\u0205\u0206\3\2\2\2\u0206\u0207\3\2"+
+"\2\2\u0207\u0209\7\60\2\2\u0208\u01db\3\2\2\2\u0208\u01e3\3\2\2\2\u0208"+
+"\u01f2\3\2\2\2\u0208\u01fa\3\2\2\2\u0208\u01ff\3\2\2\2\u0208\u0203\3\2"+
+"\2\2\u0209\65\3\2\2\2\u020a\u020b\7*\2\2\u020b\u020c\58\35\2\u020c\67"+
+"\3\2\2\2\u020d\u020f\5b\62\2\u020e\u0210\5:\36\2\u020f\u020e\3\2\2\2\u020f"+
+"\u0210\3\2\2\2\u02109\3\2\2\2\u0211\u0212\7\30\2\2\u0212\u0218\5b\62\2"+
+"\u0213\u0214\7J\2\2\u0214\u0215\5b\62\2\u0215\u0216\7Q\2\2\u0216\u0218"+
+"\3\2\2\2\u0217\u0211\3\2\2\2\u0217\u0213\3\2\2\2\u0218;\3\2\2\2\u0219"+
+"\u021a\b\37\1\2\u021a\u021e\5> \2\u021b\u021c\t\n\2\2\u021c\u021e\5<\37"+
+"\6\u021d\u0219\3\2\2\2\u021d\u021b\3\2\2\2\u021e\u022b\3\2\2\2\u021f\u0220"+
+"\f\5\2\2\u0220\u0221\t\13\2\2\u0221\u022a\5<\37\6\u0222\u0223\f\4\2\2"+
+"\u0223\u0224\t\n\2\2\u0224\u022a\5<\37\5\u0225\u0226\f\3\2\2\u0226\u0227"+
+"\5P)\2\u0227\u0228\5<\37\4\u0228\u022a\3\2\2\2\u0229\u021f\3\2\2\2\u0229"+
+"\u0222\3\2\2\2\u0229\u0225\3\2\2\2\u022a\u022d\3\2\2\2\u022b\u0229\3\2"+
+"\2\2\u022b\u022c\3\2\2\2\u022c=\3\2\2\2\u022d\u022b\3\2\2\2\u022e\u0242"+
+"\5@!\2\u022f\u0242\5D#\2\u0230\u0242\5N(\2\u0231\u0232\5V,\2\u0232\u0233"+
+"\7^\2\2\u0233\u0235\3\2\2\2\u0234\u0231\3\2\2\2\u0234\u0235\3\2\2\2\u0235"+
+"\u0236\3\2\2\2\u0236\u0242\7Z\2\2\u0237\u0242\5H%\2\u0238\u0239\7\3\2"+
+"\2\u0239\u023a\5\b\5\2\u023a\u023b\7\4\2\2\u023b\u0242\3\2\2\2\u023c\u0242"+
+"\5V,\2\u023d\u023e\7\3\2\2\u023e\u023f\5,\27\2\u023f\u0240\7\4\2\2\u0240"+
+"\u0242\3\2\2\2\u0241\u022e\3\2\2\2\u0241\u022f\3\2\2\2\u0241\u0230\3\2"+
+"\2\2\u0241\u0234\3\2\2\2\u0241\u0237\3\2\2\2\u0241\u0238\3\2\2\2\u0241"+
+"\u023c\3\2\2\2\u0241\u023d\3\2\2\2\u0242?\3\2\2\2\u0243\u0249\5B\"\2\u0244"+
+"\u0245\7K\2\2\u0245\u0246\5B\"\2\u0246\u0247\7Q\2\2\u0247\u0249\3\2\2"+
+"\2\u0248\u0243\3\2\2\2\u0248\u0244\3\2\2\2\u0249A\3\2\2\2\u024a\u024b"+
+"\7\20\2\2\u024b\u024c\7\3\2\2\u024c\u024d\5,\27\2\u024d\u024e\7\f\2\2"+
+"\u024e\u024f\5T+\2\u024f\u0250\7\4\2\2\u0250C\3\2\2\2\u0251\u0257\5F$"+
+"\2\u0252\u0253\7K\2\2\u0253\u0254\5F$\2\u0254\u0255\7Q\2\2\u0255\u0257"+
+"\3\2\2\2\u0256\u0251\3\2\2\2\u0256\u0252\3\2\2\2\u0257E\3\2\2\2\u0258"+
+"\u0259\7\34\2\2\u0259\u025a\7\3\2\2\u025a\u025b\5X-\2\u025b\u025c\7\37"+
+"\2\2\u025c\u025d\5<\37\2\u025d\u025e\7\4\2\2\u025eG\3\2\2\2\u025f\u0265"+
+"\5J&\2\u0260\u0261\7K\2\2\u0261\u0262\5J&\2\u0262\u0263\7Q\2\2\u0263\u0265"+
+"\3\2\2\2\u0264\u025f\3\2\2\2\u0264\u0260\3\2\2\2\u0265I\3\2\2\2\u0266"+
+"\u0267\5L\'\2\u0267\u0273\7\3\2\2\u0268\u026a\5\36\20\2\u0269\u0268\3"+
+"\2\2\2\u0269\u026a\3\2\2\2\u026a\u026b\3\2\2\2\u026b\u0270\5,\27\2\u026c"+
+"\u026d\7\5\2\2\u026d\u026f\5,\27\2\u026e\u026c\3\2\2\2\u026f\u0272\3\2"+
+"\2\2\u0270\u026e\3\2\2\2\u0270\u0271\3\2\2\2\u0271\u0274\3\2\2\2\u0272"+
+"\u0270\3\2\2\2\u0273\u0269\3\2\2\2\u0273\u0274\3\2\2\2\u0274\u0275\3\2"+
+"\2\2\u0275\u0276\7\4\2\2\u0276K\3\2\2\2\u0277\u027b\7)\2\2\u0278\u027b"+
+"\79\2\2\u0279\u027b\5X-\2\u027a\u0277\3\2\2\2\u027a\u0278\3\2\2\2\u027a"+
+"\u0279\3\2\2\2\u027bM\3\2\2\2\u027c\u0296\7\60\2\2\u027d\u0296\5`\61\2"+
+"\u027e\u0296\5R*\2\u027f\u0281\7`\2\2\u0280\u027f\3\2\2\2\u0281\u0282"+
+"\3\2\2\2\u0282\u0280\3\2\2\2\u0282\u0283\3\2\2\2\u0283\u0296\3\2\2\2\u0284"+
+"\u0296\7_\2\2\u0285\u0286\7M\2\2\u0286\u0287\5b\62\2\u0287\u0288\7Q\2"+
+"\2\u0288\u0296\3\2\2\2\u0289\u028a\7N\2\2\u028a\u028b\5b\62\2\u028b\u028c"+
+"\7Q\2\2\u028c\u0296\3\2\2\2\u028d\u028e\7O\2\2\u028e\u028f\5b\62\2\u028f"+
+"\u0290\7Q\2\2\u0290\u0296\3\2\2\2\u0291\u0292\7P\2\2\u0292\u0293\5b\62"+
+"\2\u0293\u0294\7Q\2\2\u0294\u0296\3\2\2\2\u0295\u027c\3\2\2\2\u0295\u027d"+
+"\3\2\2\2\u0295\u027e\3\2\2\2\u0295\u0280\3\2\2\2\u0295\u0284\3\2\2\2\u0295"+
+"\u0285\3\2\2\2\u0295\u0289\3\2\2\2\u0295\u028d\3\2\2\2\u0295\u0291\3\2"+
+"\2\2\u0296O\3\2\2\2\u0297\u0298\t\f\2\2\u0298Q\3\2\2\2\u0299\u029a\t\r"+
+"\2\2\u029aS\3\2\2\2\u029b\u029c\5X-\2\u029cU\3\2\2\2\u029d\u029e\5X-\2"+
+"\u029e\u029f\7^\2\2\u029f\u02a1\3\2\2\2\u02a0\u029d\3\2\2\2\u02a1\u02a4"+
+"\3\2\2\2\u02a2\u02a0\3\2\2\2\u02a2\u02a3\3\2\2\2\u02a3\u02a5\3\2\2\2\u02a4"+
+"\u02a2\3\2\2\2\u02a5\u02a6\5X-\2\u02a6W\3\2\2\2\u02a7\u02aa\5\\/\2\u02a8"+
+"\u02aa\5^\60\2\u02a9\u02a7\3\2\2\2\u02a9\u02a8\3\2\2\2\u02aaY\3\2\2\2"+
+"\u02ab\u02ac\5X-\2\u02ac\u02ad\7\6\2\2\u02ad\u02af\3\2\2\2\u02ae\u02ab"+
+"\3\2\2\2\u02ae\u02af\3\2\2\2\u02af\u02b0\3\2\2\2\u02b0\u02b8\7e\2\2\u02b1"+
+"\u02b2\5X-\2\u02b2\u02b3\7\6\2\2\u02b3\u02b5\3\2\2\2\u02b4\u02b1\3\2\2"+
+"\2\u02b4\u02b5\3\2\2\2\u02b5\u02b6\3\2\2\2\u02b6\u02b8\5X-\2\u02b7\u02ae"+
+"\3\2\2\2\u02b7\u02b4\3\2\2\2\u02b8[\3\2\2\2\u02b9\u02bc\7f\2\2\u02ba\u02bc"+
+"\7g\2\2\u02bb\u02b9\3\2\2\2\u02bb\u02ba\3\2\2\2\u02bc]\3\2\2\2\u02bd\u02c1"+
+"\7c\2\2\u02be\u02c1\5d\63\2\u02bf\u02c1\7d\2\2\u02c0\u02bd\3\2\2\2\u02c0"+
+"\u02be\3\2\2\2\u02c0\u02bf\3\2\2\2\u02c1_\3\2\2\2\u02c2\u02c5\7b\2\2\u02c3"+
+"\u02c5\7a\2\2\u02c4\u02c2\3\2\2\2\u02c4\u02c3\3\2\2\2\u02c5a\3\2\2\2\u02c6"+
+"\u02c7\t\16\2\2\u02c7c\3\2\2\2\u02c8\u02c9\t\17\2\2\u02c9e\3\2\2\2`uw"+
+"{\u0084\u0086\u008a\u0091\u0098\u009d\u00a2\u00ac\u00b0\u00b8\u00bb\u00c1"+
+"\u00c6\u00c9\u00d0\u00d8\u00db\u00e7\u00ea\u00ed\u00f4\u00fb\u00ff\u0103"+
+"\u010a\u010e\u0112\u0117\u011b\u0123\u0127\u012e\u0139\u013c\u0140\u014c"+
+"\u014f\u0155\u015c\u0163\u0166\u016a\u016e\u0172\u0174\u017f\u0184\u0188"+
+"\u018b\u0191\u0194\u019a\u019d\u019f\u01c2\u01ca\u01cc\u01d3\u01d8\u01db"+
+"\u01e3\u01ec\u01f2\u01fa\u01ff\u0205\u0208\u020f\u0217\u021d\u0229\u022b"+
+"\u0234\u0241\u0248\u0256\u0264\u0269\u0270\u0273\u027a\u0282\u0295\u02a2"+
+"\u02a9\u02ae\u02b4\u02b7\u02bb\u02c0\u02c4";
 public static final ATN _ATN =
 new ATNDeserializer().deserialize(_serializedATN.toCharArray());
 static {
@@ -38,7 +38,7 @@ public class PreAnalyzerTests extends ESTestCase {
 }

 public void testWildIndexWithCatalog() {
-LogicalPlan plan = parser.createStatement("SELECT * FROM elastic:index*");
+LogicalPlan plan = parser.createStatement("SELECT * FROM elastic:\"index*\"");
 PreAnalysis result = preAnalyzer.preAnalyze(plan);
 assertThat(plan.preAnalyzed(), is(true));
 assertThat(result.indices, hasSize(1));
@@ -6,31 +6,35 @@
 package org.elasticsearch.xpack.sql.expression.function;

 import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.xpack.sql.tree.Location;
-import org.elasticsearch.xpack.sql.tree.LocationTests;
-import org.elasticsearch.xpack.sql.tree.NodeInfo;
-import org.elasticsearch.xpack.sql.type.DataType;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.function.scalar.ScalarFunction;
 import org.elasticsearch.xpack.sql.expression.gen.pipeline.Pipe;
 import org.elasticsearch.xpack.sql.expression.gen.script.ScriptTemplate;
 import org.elasticsearch.xpack.sql.parser.ParsingException;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.tree.LocationTests;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;

 import java.util.Arrays;
+import java.util.Collections;
 import java.util.List;
 import java.util.TimeZone;

+import static java.util.Collections.emptyList;
 import static org.elasticsearch.xpack.sql.expression.function.FunctionRegistry.def;
 import static org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction.ResolutionType.DISTINCT;
 import static org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction.ResolutionType.EXTRACT;
 import static org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction.ResolutionType.STANDARD;
 import static org.hamcrest.Matchers.endsWith;
+import static org.hamcrest.Matchers.is;
 import static org.mockito.Mockito.mock;
-import static java.util.Collections.emptyList;

 public class FunctionRegistryTests extends ESTestCase {
 public void testNoArgFunction() {
 UnresolvedFunction ur = uf(STANDARD);
-FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, Dummy::new)));
+FunctionRegistry r = new FunctionRegistry(Collections.singletonList(def(DummyFunction.class, DummyFunction::new)));
 FunctionDefinition def = r.resolveFunction(ur.name());
 assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
@@ -47,9 +51,10 @@ public class FunctionRegistryTests extends ESTestCase {

 public void testUnaryFunction() {
 UnresolvedFunction ur = uf(STANDARD, mock(Expression.class));
-FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression e) -> {
+FunctionRegistry r = new FunctionRegistry(Collections.singletonList(
+def(DummyFunction.class, (Location l, Expression e) -> {
 assertSame(e, ur.children().get(0));
-return new Dummy(l);
+return new DummyFunction(l);
 })));
 FunctionDefinition def = r.resolveFunction(ur.name());
 assertFalse(def.datetime());
@@ -74,11 +79,12 @@ public class FunctionRegistryTests extends ESTestCase {
 public void testUnaryDistinctAwareFunction() {
 boolean urIsDistinct = randomBoolean();
 UnresolvedFunction ur = uf(urIsDistinct ? DISTINCT : STANDARD, mock(Expression.class));
-FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression e, boolean distinct) -> {
+FunctionRegistry r = new FunctionRegistry(Collections.singletonList(
+def(DummyFunction.class, (Location l, Expression e, boolean distinct) -> {
 assertEquals(urIsDistinct, distinct);
 assertSame(e, ur.children().get(0));
-return new Dummy(l);
+return new DummyFunction(l);
 })));
 FunctionDefinition def = r.resolveFunction(ur.name());
 assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
 assertFalse(def.datetime());
@@ -98,11 +104,12 @@ public class FunctionRegistryTests extends ESTestCase {
 boolean urIsExtract = randomBoolean();
 UnresolvedFunction ur = uf(urIsExtract ? EXTRACT : STANDARD, mock(Expression.class));
 TimeZone providedTimeZone = randomTimeZone();
-FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression e, TimeZone tz) -> {
+FunctionRegistry r = new FunctionRegistry(Collections.singletonList(
+def(DummyFunction.class, (Location l, Expression e, TimeZone tz) -> {
 assertEquals(providedTimeZone, tz);
 assertSame(e, ur.children().get(0));
-return new Dummy(l);
+return new DummyFunction(l);
 })));
 FunctionDefinition def = r.resolveFunction(ur.name());
 assertEquals(ur.location(), ur.buildResolved(providedTimeZone, def).location());
 assertTrue(def.datetime());
@@ -125,11 +132,12 @@ public class FunctionRegistryTests extends ESTestCase {

 public void testBinaryFunction() {
 UnresolvedFunction ur = uf(STANDARD, mock(Expression.class), mock(Expression.class));
-FunctionRegistry r = new FunctionRegistry(Arrays.asList(def(Dummy.class, (Location l, Expression lhs, Expression rhs) -> {
+FunctionRegistry r = new FunctionRegistry(Collections.singletonList(
+def(DummyFunction.class, (Location l, Expression lhs, Expression rhs) -> {
 assertSame(lhs, ur.children().get(0));
 assertSame(rhs, ur.children().get(1));
-return new Dummy(l);
+return new DummyFunction(l);
 })));
 FunctionDefinition def = r.resolveFunction(ur.name());
 assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
 assertFalse(def.datetime());
@@ -156,17 +164,60 @@ public class FunctionRegistryTests extends ESTestCase {
 assertThat(e.getMessage(), endsWith("expects exactly two arguments"));
 }

+public void testFunctionResolving() {
+UnresolvedFunction ur = uf(STANDARD, mock(Expression.class));
+FunctionRegistry r = new FunctionRegistry(
+Collections.singletonList(def(DummyFunction.class, (Location l, Expression e) -> {
+assertSame(e, ur.children().get(0));
+return new DummyFunction(l);
+}, "DUMMY_FUNC")));
+
+// Resolve by primary name
+FunctionDefinition def = r.resolveFunction(r.resolveAlias("DuMMy_FuncTIon"));
+assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
+
+def = r.resolveFunction(r.resolveAlias("Dummy_Function"));
+assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
+
+def = r.resolveFunction(r.resolveAlias("dummy_function"));
+assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
+
+def = r.resolveFunction(r.resolveAlias("DUMMY_FUNCTION"));
+assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
+
+// Resolve by alias
+def = r.resolveFunction(r.resolveAlias("DumMy_FunC"));
+assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
+
+def = r.resolveFunction(r.resolveAlias("dummy_func"));
+assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
+
+def = r.resolveFunction(r.resolveAlias("DUMMY_FUNC"));
+assertEquals(ur.location(), ur.buildResolved(randomTimeZone(), def).location());
+
+// Not resolved
+SqlIllegalArgumentException e = expectThrows(SqlIllegalArgumentException.class,
+() -> r.resolveFunction(r.resolveAlias("DummyFunction")));
+assertThat(e.getMessage(),
+is("Cannot find function DUMMYFUNCTION; this should have been caught during analysis"));
+
+e = expectThrows(SqlIllegalArgumentException.class,
+() -> r.resolveFunction(r.resolveAlias("dummyFunction")));
+assertThat(e.getMessage(),
+is("Cannot find function DUMMYFUNCTION; this should have been caught during analysis"));
+}
+
 private UnresolvedFunction uf(UnresolvedFunction.ResolutionType resolutionType, Expression... children) {
-return new UnresolvedFunction(LocationTests.randomLocation(), "dummy", resolutionType, Arrays.asList(children));
+return new UnresolvedFunction(LocationTests.randomLocation(), "DUMMY_FUNCTION", resolutionType, Arrays.asList(children));
 }

-public static class Dummy extends ScalarFunction {
-public Dummy(Location location) {
+public static class DummyFunction extends ScalarFunction {
+public DummyFunction(Location location) {
 super(location, emptyList());
 }

 @Override
-protected NodeInfo<Dummy> info() {
+protected NodeInfo<DummyFunction> info() {
 return NodeInfo.create(this);
 }
@@ -9,21 +9,26 @@ import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.xpack.sql.expression.Expression;
 import org.elasticsearch.xpack.sql.expression.Literal;
 import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Add;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Mul;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Neg;
+import org.elasticsearch.xpack.sql.expression.predicate.operator.arithmetic.Sub;
 import org.elasticsearch.xpack.sql.type.DataType;

+import static org.hamcrest.core.StringStartsWith.startsWith;
+
 public class ExpressionTests extends ESTestCase {

 private final SqlParser parser = new SqlParser();

-public void testTokenFunctionName() throws Exception {
+public void testTokenFunctionName() {
 Expression lt = parser.createExpression("LEFT()");
 assertEquals(UnresolvedFunction.class, lt.getClass());
 UnresolvedFunction uf = (UnresolvedFunction) lt;
 assertEquals("LEFT", uf.functionName());
 }

-public void testLiteralBoolean() throws Exception {
+public void testLiteralBoolean() {
 Expression lt = parser.createExpression("TRUE");
 assertEquals(Literal.class, lt.getClass());
 Literal l = (Literal) lt;
@@ -31,7 +36,7 @@ public class ExpressionTests extends ESTestCase {
 assertEquals(DataType.BOOLEAN, l.dataType());
 }

-public void testLiteralDouble() throws Exception {
+public void testLiteralDouble() {
 Expression lt = parser.createExpression(String.valueOf(Double.MAX_VALUE));
 assertEquals(Literal.class, lt.getClass());
 Literal l = (Literal) lt;
@@ -39,7 +44,7 @@ public class ExpressionTests extends ESTestCase {
 assertEquals(DataType.DOUBLE, l.dataType());
 }

-public void testLiteralDoubleNegative() throws Exception {
+public void testLiteralDoubleNegative() {
 Expression lt = parser.createExpression(String.valueOf(Double.MIN_VALUE));
 assertEquals(Literal.class, lt.getClass());
 Literal l = (Literal) lt;
@ -47,7 +52,7 @@ public class ExpressionTests extends ESTestCase {
|
|||||||
assertEquals(DataType.DOUBLE, l.dataType());
|
assertEquals(DataType.DOUBLE, l.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralDoublePositive() throws Exception {
|
public void testLiteralDoublePositive() {
|
||||||
Expression lt = parser.createExpression("+" + Double.MAX_VALUE);
|
Expression lt = parser.createExpression("+" + Double.MAX_VALUE);
|
||||||
assertEquals(Literal.class, lt.getClass());
|
assertEquals(Literal.class, lt.getClass());
|
||||||
Literal l = (Literal) lt;
|
Literal l = (Literal) lt;
|
||||||
@ -55,7 +60,7 @@ public class ExpressionTests extends ESTestCase {
|
|||||||
assertEquals(DataType.DOUBLE, l.dataType());
|
assertEquals(DataType.DOUBLE, l.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralLong() throws Exception {
|
public void testLiteralLong() {
|
||||||
Expression lt = parser.createExpression(String.valueOf(Long.MAX_VALUE));
|
Expression lt = parser.createExpression(String.valueOf(Long.MAX_VALUE));
|
||||||
assertEquals(Literal.class, lt.getClass());
|
assertEquals(Literal.class, lt.getClass());
|
||||||
Literal l = (Literal) lt;
|
Literal l = (Literal) lt;
|
||||||
@ -63,14 +68,14 @@ public class ExpressionTests extends ESTestCase {
|
|||||||
assertEquals(DataType.LONG, l.dataType());
|
assertEquals(DataType.LONG, l.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralLongNegative() throws Exception {
|
public void testLiteralLongNegative() {
|
||||||
Expression lt = parser.createExpression(String.valueOf(Long.MIN_VALUE));
|
Expression lt = parser.createExpression(String.valueOf(Long.MIN_VALUE));
|
||||||
assertTrue(lt.foldable());
|
assertTrue(lt.foldable());
|
||||||
assertEquals(Long.MIN_VALUE, lt.fold());
|
assertEquals(Long.MIN_VALUE, lt.fold());
|
||||||
assertEquals(DataType.LONG, lt.dataType());
|
assertEquals(DataType.LONG, lt.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralLongPositive() throws Exception {
|
public void testLiteralLongPositive() {
|
||||||
Expression lt = parser.createExpression("+" + String.valueOf(Long.MAX_VALUE));
|
Expression lt = parser.createExpression("+" + String.valueOf(Long.MAX_VALUE));
|
||||||
assertEquals(Literal.class, lt.getClass());
|
assertEquals(Literal.class, lt.getClass());
|
||||||
Literal l = (Literal) lt;
|
Literal l = (Literal) lt;
|
||||||
@ -78,7 +83,7 @@ public class ExpressionTests extends ESTestCase {
|
|||||||
assertEquals(DataType.LONG, l.dataType());
|
assertEquals(DataType.LONG, l.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralInteger() throws Exception {
|
public void testLiteralInteger() {
|
||||||
Expression lt = parser.createExpression(String.valueOf(Integer.MAX_VALUE));
|
Expression lt = parser.createExpression(String.valueOf(Integer.MAX_VALUE));
|
||||||
assertEquals(Literal.class, lt.getClass());
|
assertEquals(Literal.class, lt.getClass());
|
||||||
Literal l = (Literal) lt;
|
Literal l = (Literal) lt;
|
||||||
@ -86,29 +91,69 @@ public class ExpressionTests extends ESTestCase {
|
|||||||
assertEquals(DataType.INTEGER, l.dataType());
|
assertEquals(DataType.INTEGER, l.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralIntegerWithShortValue() throws Exception {
|
public void testLiteralIntegerWithShortValue() {
|
||||||
Expression lt = parser.createExpression(String.valueOf(Short.MAX_VALUE));
|
Expression lt = parser.createExpression(String.valueOf(Short.MAX_VALUE));
|
||||||
assertEquals(Literal.class, lt.getClass());
|
assertEquals(Literal.class, lt.getClass());
|
||||||
Literal l = (Literal) lt;
|
Literal l = (Literal) lt;
|
||||||
assertEquals(Integer.valueOf(Short.MAX_VALUE), l.value());
|
assertEquals((int) Short.MAX_VALUE, l.value());
|
||||||
assertEquals(DataType.INTEGER, l.dataType());
|
assertEquals(DataType.INTEGER, l.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralIntegerWithByteValue() throws Exception {
|
public void testLiteralIntegerWithByteValue() {
|
||||||
Expression lt = parser.createExpression(String.valueOf(Byte.MAX_VALUE));
|
Expression lt = parser.createExpression(String.valueOf(Byte.MAX_VALUE));
|
||||||
assertEquals(Literal.class, lt.getClass());
|
assertEquals(Literal.class, lt.getClass());
|
||||||
Literal l = (Literal) lt;
|
Literal l = (Literal) lt;
|
||||||
assertEquals(Integer.valueOf(Byte.MAX_VALUE), l.value());
|
assertEquals((int) Byte.MAX_VALUE, l.value());
|
||||||
assertEquals(DataType.INTEGER, l.dataType());
|
assertEquals(DataType.INTEGER, l.dataType());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralIntegerInvalid() throws Exception {
|
public void testLiteralIntegerInvalid() {
|
||||||
ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("123456789098765432101"));
|
ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("123456789098765432101"));
|
||||||
assertEquals("Number [123456789098765432101] is too large", ex.getErrorMessage());
|
assertEquals("Number [123456789098765432101] is too large", ex.getErrorMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
public void testLiteralDecimalTooBig() throws Exception {
|
public void testLiteralDecimalTooBig() {
|
||||||
ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("1.9976931348623157e+308"));
|
ParsingException ex = expectThrows(ParsingException.class, () -> parser.createExpression("1.9976931348623157e+308"));
|
||||||
assertEquals("Number [1.9976931348623157e+308] is too large", ex.getErrorMessage());
|
assertEquals("Number [1.9976931348623157e+308] is too large", ex.getErrorMessage());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public void testLiteralTimesLiteral() {
|
||||||
|
Expression expr = parser.createExpression("10*2");
|
||||||
|
assertEquals(Mul.class, expr.getClass());
|
||||||
|
Mul mul = (Mul) expr;
|
||||||
|
assertEquals("10 * 2", mul.name());
|
||||||
|
assertEquals(DataType.INTEGER, mul.dataType());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testFunctionTimesLiteral() {
|
||||||
|
Expression expr = parser.createExpression("PI()*2");
|
||||||
|
assertEquals(Mul.class, expr.getClass());
|
||||||
|
Mul mul = (Mul) expr;
|
||||||
|
assertEquals("(PI) * 2", mul.name());
|
||||||
|
}
|
||||||
|
|
||||||
|
public void testComplexArithmetic() {
|
||||||
|
Expression expr = parser.createExpression("-(((a-2)-(-3))+b)");
|
||||||
|
assertEquals(Neg.class, expr.getClass());
|
||||||
|
Neg neg = (Neg) expr;
|
||||||
|
assertThat(neg.name(), startsWith("-(((a) - 2) - -3) + (b)#"));
|
||||||
|
assertEquals(1, neg.children().size());
|
||||||
|
assertEquals(Add.class, neg.children().get(0).getClass());
|
||||||
|
Add add = (Add) neg.children().get(0);
|
||||||
|
assertEquals("(((a) - 2) - -3) + (b)", add.name());
|
||||||
|
assertEquals(2, add.children().size());
|
||||||
|
assertEquals("?b", add.children().get(1).toString());
|
||||||
|
assertEquals(Sub.class, add.children().get(0).getClass());
|
||||||
|
Sub sub1 = (Sub) add.children().get(0);
|
||||||
|
assertEquals("((a) - 2) - -3", sub1.name());
|
||||||
|
assertEquals(2, sub1.children().size());
|
||||||
|
assertEquals(Literal.class, sub1.children().get(1).getClass());
|
||||||
|
assertEquals("-3", ((Literal) sub1.children().get(1)).name());
|
||||||
|
assertEquals(Sub.class, sub1.children().get(0).getClass());
|
||||||
|
Sub sub2 = (Sub) sub1.children().get(0);
|
||||||
|
assertEquals(2, sub2.children().size());
|
||||||
|
assertEquals("?a", sub2.children().get(0).toString());
|
||||||
|
assertEquals(Literal.class, sub2.children().get(1).getClass());
|
||||||
|
assertEquals("2", ((Literal) sub2.children().get(1)).name());
|
||||||
|
}
|
||||||
}
|
}
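
As an aside on what the literal tests above pin down: unsuffixed whole numbers are narrowed to INTEGER when they fit in 32 bits and to LONG when they fit in 64, decimals become DOUBLE, and anything beyond a double's range is rejected with "Number [...] is too large". A minimal plain-Java sketch of that narrowing rule (illustrative only -- the class and method names are made up here and this is not the SqlParser code):

import java.math.BigInteger;

public class LiteralNarrowingSketch {

    // Illustrative only: classify a non-negative numeric literal the way the
    // tests above expect -- INTEGER if it fits in an int, LONG if it fits in
    // a long, DOUBLE for decimals, and "too large" for everything else.
    static String narrowestType(String literal) {
        if (literal.contains(".") || literal.contains("e") || literal.contains("E")) {
            double d = Double.parseDouble(literal);
            if (Double.isInfinite(d)) {
                // Overflowed a double, e.g. 1.9976931348623157e+308
                throw new IllegalArgumentException("Number [" + literal + "] is too large");
            }
            return "DOUBLE";
        }
        BigInteger value = new BigInteger(literal);
        if (value.bitLength() <= 31) {
            return "INTEGER"; // Short.MAX_VALUE and Byte.MAX_VALUE land here too
        }
        if (value.bitLength() <= 63) {
            return "LONG";
        }
        throw new IllegalArgumentException("Number [" + literal + "] is too large");
    }

    public static void main(String[] args) {
        System.out.println(narrowestType(String.valueOf(Short.MAX_VALUE)));  // INTEGER
        System.out.println(narrowestType(String.valueOf(Long.MAX_VALUE)));   // LONG
        System.out.println(narrowestType(String.valueOf(Double.MAX_VALUE))); // DOUBLE
        try {
            narrowestType("123456789098765432101");
        } catch (IllegalArgumentException e) {
            System.out.println(e.getMessage()); // Number [...] is too large
        }
    }
}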
@@ -6,7 +6,6 @@
 package org.elasticsearch.xpack.sql.tree;

 import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
-
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.test.ESTestCase;
@@ -418,7 +417,7 @@ public class NodeSubclassTests<T extends B, B extends Node<B>> extends ESTestCas
             }
         } else if (toBuildClass == ChildrenAreAProperty.class) {
             /*
-             * While any subclass of Dummy will do here we want to prevent
+             * While any subclass of DummyFunction will do here we want to prevent
              * stack overflow so we use the one without children.
              */
             if (argClass == Dummy.class) {
@@ -288,6 +288,109 @@ setup:
           - agg: "max"
           - agg: "sum"

+---
+"Verify job caps by rollup index comma delimited list":
+
+  - skip:
+      version: " - 6.99.99"
+      reason: "comma delimited index support was fixed in 7.0"
+
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      xpack.rollup.put_job:
+        id: foo2
+        body: >
+          {
+            "index_pattern": "foo2",
+            "rollup_index": "foo_rollup",
+            "cron": "*/30 * * * * ?",
+            "page_size" :10,
+            "groups" : {
+              "date_histogram": {
+                "field": "the_field",
+                "interval": "1h"
+              }
+            },
+            "metrics": [
+              {
+                "field": "value_field",
+                "metrics": ["min", "max", "sum"]
+              }
+            ]
+          }
+  - do:
+      headers:
+        Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser
+      xpack.rollup.put_job:
+        id: foo3
+        body: >
+          {
+            "index_pattern": "foo3",
+            "rollup_index": "foo_rollup2",
+            "cron": "*/30 * * * * ?",
+            "page_size" :10,
+            "groups" : {
+              "date_histogram": {
+                "field": "the_field",
+                "interval": "1h"
+              }
+            },
+            "metrics": [
+              {
+                "field": "value_field",
+                "metrics": ["min", "max", "sum"]
+              }
+            ]
+          }
+
+  - do:
+      xpack.rollup.get_rollup_index_caps:
+        index: "foo_rollup2,foo_rollup"
+
+  - match:
+      $body:
+        foo_rollup:
+          rollup_jobs:
+            - job_id: "foo"
+              rollup_index: "foo_rollup"
+              index_pattern: "foo"
+              fields:
+                the_field:
+                  - agg: "date_histogram"
+                    interval: "1h"
+                    time_zone: "UTC"
+                value_field:
+                  - agg: "min"
+                  - agg: "max"
+                  - agg: "sum"
+            - job_id: "foo2"
+              rollup_index: "foo_rollup"
+              index_pattern: "foo2"
+              fields:
+                the_field:
+                  - agg: "date_histogram"
+                    interval: "1h"
+                    time_zone: "UTC"
+                value_field:
+                  - agg: "min"
+                  - agg: "max"
+                  - agg: "sum"
+        foo_rollup2:
+          rollup_jobs:
+            - job_id: "foo3"
+              rollup_index: "foo_rollup2"
+              index_pattern: "foo3"
+              fields:
+                the_field:
+                  - agg: "date_histogram"
+                    interval: "1h"
+                    time_zone: "UTC"
+                value_field:
+                  - agg: "min"
+                  - agg: "max"
+                  - agg: "sum"
+
 ---
 "Verify index pattern":

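For context, the new YAML test above drives xpack.rollup.get_rollup_index_caps with a comma-delimited index list. Below is a rough sketch of the same request issued from Java with the low-level REST client; the /{index}/_xpack/rollup/data path, host, and index names are assumptions based on the 6.x REST spec, not something this diff confirms:

import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RollupIndexCapsSketch {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Same comma-delimited index list the YAML test sends; the
            // /{index}/_xpack/rollup/data path is assumed from the 6.x REST spec.
            Request request = new Request("GET", "/foo_rollup2,foo_rollup/_xpack/rollup/data");
            Response response = client.performRequest(request);
            // Prints the caps grouped per rollup index, as asserted in the test.
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}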
@@ -47,7 +47,7 @@ salary | INTEGER
 ;

 describePattern
-DESCRIBE test_*;
+DESCRIBE "test_*";

 column:s | type:s

@@ -99,7 +99,7 @@ F | 10099.28
 ;

 testGroupByOnPattern
-SELECT gender, PERCENTILE(emp_no, 97) p1 FROM test_* WHERE gender is NOT NULL GROUP BY gender;
+SELECT gender, PERCENTILE(emp_no, 97) p1 FROM "test_*" WHERE gender is NOT NULL GROUP BY gender;

 gender:s | p1:d

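The csv-spec change above captures that wildcard index patterns are now written as quoted identifiers in SQL. A hypothetical JDBC-style sketch of the quoted form follows; the connection URL and driver setup are assumptions for illustration, not part of this change:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class QuotedPatternSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical connection URL; requires the Elasticsearch SQL JDBC driver.
        try (Connection con = DriverManager.getConnection("jdbc:es://localhost:9200");
             Statement st = con.createStatement();
             // The wildcard index pattern is a quoted identifier, not a bare token.
             ResultSet rs = st.executeQuery(
                 "SELECT gender, PERCENTILE(emp_no, 97) p1 FROM \"test_*\" "
                     + "WHERE gender IS NOT NULL GROUP BY gender")) {
            while (rs.next()) {
                System.out.println(rs.getString("gender") + " | " + rs.getDouble("p1"));
            }
        }
    }
}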