Merge remote-tracking branch 'es/master' into ccr
* es/master:
  Remove redundant argument for buildConfiguration of s3 plugin (#28281)
  Completely remove Painless Type from AnalyzerCaster in favor of Java Class. (#28329)
  Fix spelling error
  Reindex: Wait for deletion in test
  Reindex: log more on rare test failure
  Ensure we protect Collections obtained from scripts from self-referencing (#28335)
  [Docs] Fix asciidoc style in composite agg docs
  Adds the ability to specify a format on composite date_histogram source (#28310)
  Provide a better error message for the case when all shards failed (#28333)
  [Test] Re-Add integer_range and date_range field types for query builder tests (#28171)
  Added Put Mapping API to high-level Rest client (#27869)
  Revert change that does not return all indices if a specific alias is requested via get alias api. (#28294)
  Painless: Replace Painless Type with Java Class during Casts (#27847)
  Notify affixMap settings when any under the registered prefix matches (#28317)
commit 5b2dceea74
@@ -27,6 +27,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
@@ -89,6 +91,29 @@ public final class IndicesClient {
             listener, Collections.emptySet(), headers);
     }
 
+    /**
+     * Updates the mappings on an index using the Put Mapping API
+     * <p>
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-mapping.html">
+     * Put Mapping API on elastic.co</a>
+     */
+    public PutMappingResponse putMapping(PutMappingRequest putMappingRequest, Header... headers) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(putMappingRequest, Request::putMapping, PutMappingResponse::fromXContent,
+            Collections.emptySet(), headers);
+    }
+
+    /**
+     * Asynchronously updates the mappings on an index using the Put Mapping API
+     * <p>
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-put-mapping.html">
+     * Put Mapping API on elastic.co</a>
+     */
+    public void putMappingAsync(PutMappingRequest putMappingRequest, ActionListener<PutMappingResponse> listener,
+                                Header... headers) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(putMappingRequest, Request::putMapping, PutMappingResponse::fromXContent,
+            listener, Collections.emptySet(), headers);
+    }
+
     /**
      * Opens an index using the Open Index API
      * <p>
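For orientation, here is a minimal sketch of how a caller would exercise the two methods added above, assuming an already-configured `RestHighLevelClient` named `client` with the usual imports (`XContentType` etc.); the index and type names are illustrative, not from the commit:

["source","java"]
--------------------------------------------------
// Build the request: index and type, plus a JSON mapping source.
PutMappingRequest request = new PutMappingRequest("my-index"); // hypothetical index name
request.type("my-type");                                       // hypothetical type name
request.source(
    "{\"my-type\":{\"properties\":{\"message\":{\"type\":\"text\"}}}}",
    XContentType.JSON);

// Synchronous variant: blocks until the cluster acknowledges the mapping update.
PutMappingResponse response = client.indices().putMapping(request);
boolean acknowledged = response.isAcknowledged();
--------------------------------------------------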
@@ -32,6 +32,7 @@ import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.delete.DeleteRequest;
@@ -178,6 +179,22 @@ public final class Request {
         return new Request(HttpPut.METHOD_NAME, endpoint, parameters.getParams(), entity);
     }
 
+    static Request putMapping(PutMappingRequest putMappingRequest) throws IOException {
+        // The concreteIndex is an internal concept, not applicable to requests made over the REST API.
+        if (putMappingRequest.getConcreteIndex() != null) {
+            throw new IllegalArgumentException("concreteIndex cannot be set on PutMapping requests made over the REST API");
+        }
+
+        String endpoint = endpoint(putMappingRequest.indices(), "_mapping", putMappingRequest.type());
+
+        Params parameters = Params.builder();
+        parameters.withTimeout(putMappingRequest.timeout());
+        parameters.withMasterTimeout(putMappingRequest.masterNodeTimeout());
+
+        HttpEntity entity = createEntity(putMappingRequest, REQUEST_BODY_CONTENT_TYPE);
+        return new Request(HttpPut.METHOD_NAME, endpoint, parameters.getParams(), entity);
+    }
+
     static Request info() {
         return new Request(HttpGet.METHOD_NAME, "/", Collections.emptyMap(), null);
     }
@@ -454,6 +471,10 @@ public final class Request {
         return endpoint(String.join(",", indices), String.join(",", types), endpoint);
     }
 
+    static String endpoint(String[] indices, String endpoint, String type) {
+        return endpoint(String.join(",", indices), endpoint, type);
+    }
+
     /**
      * Utility method to build request's endpoint.
     */
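The new `endpoint(String[] indices, String endpoint, String type)` overload simply comma-joins the index names before delegating, so a request for indices `index1` and `index2` with type `doc` goes to `/index1,index2/_mapping/doc`. A self-contained sketch of that composition (mirroring the unit test further down; the index and type names are hypothetical):

["source","java"]
--------------------------------------------------
import java.util.StringJoiner;

String[] indices = {"index1", "index2"};          // hypothetical indices
StringJoiner endpoint = new StringJoiner("/", "/", "");
String index = String.join(",", indices);
if (index.isEmpty() == false) {
    endpoint.add(index);                          // "/index1,index2"
}
endpoint.add("_mapping");                         // "/index1,index2/_mapping"
endpoint.add("doc");                              // "/index1,index2/_mapping/doc"
--------------------------------------------------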
@@ -27,6 +27,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
 import org.elasticsearch.action.support.IndicesOptions;
@@ -108,6 +110,35 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
         }
     }
 
+    @SuppressWarnings("unchecked")
+    public void testPutMapping() throws IOException {
+        {
+            // Add mappings to index
+            String indexName = "mapping_index";
+            createIndex(indexName);
+
+            PutMappingRequest putMappingRequest = new PutMappingRequest(indexName);
+            putMappingRequest.type("type_name");
+            XContentBuilder mappingBuilder = JsonXContent.contentBuilder();
+            mappingBuilder.startObject().startObject("properties").startObject("field");
+            mappingBuilder.field("type", "text");
+            mappingBuilder.endObject().endObject().endObject();
+            putMappingRequest.source(mappingBuilder);
+
+            PutMappingResponse putMappingResponse =
+                execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync);
+            assertTrue(putMappingResponse.isAcknowledged());
+
+            Map<String, Object> indexMetaData = getIndexMetadata(indexName);
+            Map<String, Object> mappingsData = (Map) indexMetaData.get("mappings");
+            Map<String, Object> typeData = (Map) mappingsData.get("type_name");
+            Map<String, Object> properties = (Map) typeData.get("properties");
+            Map<String, Object> field = (Map) properties.get("field");
+
+            assertEquals("text", field.get("type"));
+        }
+    }
+
     public void testDeleteIndex() throws IOException {
         {
             // Delete index if exists
@@ -28,6 +28,7 @@ import org.elasticsearch.action.DocWriteRequest;
 import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.bulk.BulkRequest;
 import org.elasticsearch.action.bulk.BulkShardRequest;
@@ -317,6 +318,39 @@ public class RequestTests extends ESTestCase {
         assertToXContentBody(createIndexRequest, request.getEntity());
     }
 
+    public void testPutMapping() throws IOException {
+        PutMappingRequest putMappingRequest = new PutMappingRequest();
+
+        int numIndices = randomIntBetween(0, 5);
+        String[] indices = new String[numIndices];
+        for (int i = 0; i < numIndices; i++) {
+            indices[i] = "index-" + randomAlphaOfLengthBetween(2, 5);
+        }
+        putMappingRequest.indices(indices);
+
+        String type = randomAlphaOfLengthBetween(3, 10);
+        putMappingRequest.type(type);
+
+        Map<String, String> expectedParams = new HashMap<>();
+
+        setRandomTimeout(putMappingRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
+        setRandomMasterTimeout(putMappingRequest, expectedParams);
+
+        Request request = Request.putMapping(putMappingRequest);
+        StringJoiner endpoint = new StringJoiner("/", "/", "");
+        String index = String.join(",", indices);
+        if (Strings.hasLength(index)) {
+            endpoint.add(index);
+        }
+        endpoint.add("_mapping");
+        endpoint.add(type);
+        assertEquals(endpoint.toString(), request.getEndpoint());
+
+        assertEquals(expectedParams, request.getParameters());
+        assertEquals("PUT", request.getMethod());
+        assertToXContentBody(putMappingRequest, request.getEntity());
+    }
+
     public void testDeleteIndex() {
         String[] indices = randomIndicesNames(0, 5);
         DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices);
@@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
 import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
 import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
+import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
 import org.elasticsearch.action.admin.indices.open.OpenIndexRequest;
 import org.elasticsearch.action.admin.indices.open.OpenIndexResponse;
 import org.elasticsearch.action.support.ActiveShardCount;
@@ -157,15 +159,15 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
 
         // tag::create-index-request-mappings
         request.mapping("tweet", // <1>
-            "  {\n" +
-            "    \"tweet\": {\n" +
-            "      \"properties\": {\n" +
-            "        \"message\": {\n" +
-            "          \"type\": \"text\"\n" +
-            "        }\n" +
-            "      }\n" +
-            "  }", // <2>
+            "{\n" +
+            "  \"tweet\": {\n" +
+            "    \"properties\": {\n" +
+            "      \"message\": {\n" +
+            "        \"type\": \"text\"\n" +
+            "      }\n" +
+            "    }\n" +
+            "  }\n" +
+            "}", // <2>
             XContentType.JSON);
         // end::create-index-request-mappings
@@ -228,6 +230,86 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase
         }
     }
 
+    public void testPutMapping() throws IOException {
+        RestHighLevelClient client = highLevelClient();
+
+        {
+            CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"));
+            assertTrue(createIndexResponse.isAcknowledged());
+        }
+
+        {
+            // tag::put-mapping-request
+            PutMappingRequest request = new PutMappingRequest("twitter"); // <1>
+            request.type("tweet"); // <2>
+            // end::put-mapping-request
+
+            // tag::put-mapping-request-source
+            request.source(
+                "{\n" +
+                "  \"tweet\": {\n" +
+                "    \"properties\": {\n" +
+                "      \"message\": {\n" +
+                "        \"type\": \"text\"\n" +
+                "      }\n" +
+                "    }\n" +
+                "  }\n" +
+                "}", // <1>
+                XContentType.JSON);
+            // end::put-mapping-request-source
+
+            // tag::put-mapping-request-timeout
+            request.timeout(TimeValue.timeValueMinutes(2)); // <1>
+            request.timeout("2m"); // <2>
+            // end::put-mapping-request-timeout
+            // tag::put-mapping-request-masterTimeout
+            request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
+            request.masterNodeTimeout("1m"); // <2>
+            // end::put-mapping-request-masterTimeout
+
+            // tag::put-mapping-execute
+            PutMappingResponse putMappingResponse = client.indices().putMapping(request);
+            // end::put-mapping-execute
+
+            // tag::put-mapping-response
+            boolean acknowledged = putMappingResponse.isAcknowledged(); // <1>
+            // end::put-mapping-response
+            assertTrue(acknowledged);
+        }
+    }
+
+    public void testPutMappingAsync() throws Exception {
+        final RestHighLevelClient client = highLevelClient();
+
+        {
+            CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"));
+            assertTrue(createIndexResponse.isAcknowledged());
+        }
+
+        {
+            PutMappingRequest request = new PutMappingRequest("twitter").type("tweet");
+            // tag::put-mapping-execute-async
+            client.indices().putMappingAsync(request, new ActionListener<PutMappingResponse>() {
+                @Override
+                public void onResponse(PutMappingResponse putMappingResponse) {
+                    // <1>
+                }
+
+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            });
+            // end::put-mapping-execute-async
+
+            assertBusy(() -> {
+                // TODO Use Indices Exist API instead once it exists
+                Response response = client.getLowLevelClient().performRequest("HEAD", "twitter");
+                assertTrue(RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode());
+            });
+        }
+    }
+
     public void testOpenIndex() throws IOException {
         RestHighLevelClient client = highLevelClient();
 
@@ -6,6 +6,8 @@ include::open_index.asciidoc[]
 
 include::close_index.asciidoc[]
 
+include::putmapping.asciidoc[]
+
 include::_index.asciidoc[]
 
 include::get.asciidoc[]
 
@@ -0,0 +1,71 @@
+[[java-rest-high-put-mapping]]
+=== Put Mapping API
+
+[[java-rest-high-put-mapping-request]]
+==== Put Mapping Request
+
+A `PutMappingRequest` requires an `index` argument and a type:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-mapping-request]
+--------------------------------------------------
+<1> The index to add the mapping to
+<2> The type to create (or update)
+
+==== Mapping source
+A description of the fields to create on the mapping; if not defined, the mapping will default to empty.
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-mapping-request-source]
+--------------------------------------------------
+<1> The mapping source
+
+==== Optional arguments
+The following arguments can optionally be provided:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-mapping-request-timeout]
+--------------------------------------------------
+<1> Timeout to wait for all the nodes to acknowledge the mapping update as a `TimeValue`
+<2> Timeout to wait for all the nodes to acknowledge the mapping update as a `String`
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-mapping-request-masterTimeout]
+--------------------------------------------------
+<1> Timeout to connect to the master node as a `TimeValue`
+<2> Timeout to connect to the master node as a `String`
+
+[[java-rest-high-put-mapping-sync]]
+==== Synchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-mapping-execute]
+--------------------------------------------------
+
+[[java-rest-high-put-mapping-async]]
+==== Asynchronous Execution
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-mapping-execute-async]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument
+<2> Called in case of failure. The raised exception is provided as an argument
+
+[[java-rest-high-put-mapping-response]]
+==== Put Mapping Response
+
+The returned `PutMappingResponse` allows retrieving information about the executed
+operation as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[put-mapping-response]
+--------------------------------------------------
+<1> Indicates whether all of the nodes have acknowledged the request
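Besides the raw JSON string shown in the tagged snippets, the request source can also be built programmatically with an `XContentBuilder`, as the integration test earlier in this commit does. A condensed sketch (index, type, and field names hypothetical):

["source","java"]
--------------------------------------------------
// Build the mapping body as structured content rather than a JSON string.
XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject()
    .startObject("properties")
        .startObject("message")
            .field("type", "text")
        .endObject()
    .endObject()
.endObject();

PutMappingRequest request = new PutMappingRequest("twitter").type("tweet");
request.source(builder);
--------------------------------------------------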
@@ -8,6 +8,7 @@ Indices APIs::
 * <<java-rest-high-delete-index>>
 * <<java-rest-high-open-index>>
 * <<java-rest-high-close-index>>
+* <<java-rest-high-put-mapping>>
 
 Single document APIs::
 * <<java-rest-high-document-index>>
@@ -224,8 +224,40 @@ Time values can also be specified via abbreviations supported by <<time-units,time units>> parsing.
 Note that fractional time values are not supported, but you can address this by shifting to another
 time unit (e.g., `1.5h` could instead be specified as `90m`).
 
-[float]
-===== Time Zone
+====== Format
+
+Internally, a date is represented as a 64 bit number representing a timestamp in milliseconds-since-the-epoch.
+These timestamps are returned as the bucket keys. It is possible to return a formatted date string instead using
+the format specified with the format parameter:
+
+[source,js]
+--------------------------------------------------
+GET /_search
+{
+    "aggs" : {
+        "my_buckets": {
+            "composite" : {
+                "sources" : [
+                    {
+                        "date": {
+                            "date_histogram" : {
+                                "field": "timestamp",
+                                "interval": "1d",
+                                "format": "yyyy-MM-dd" <1>
+                            }
+                        }
+                    }
+                ]
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> Supports expressive date <<date-format-pattern,format pattern>>
+
+====== Time Zone
 
 Date-times are stored in Elasticsearch in UTC. By default, all bucketing and
 rounding is also done in UTC. The `time_zone` parameter can be used to indicate
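For instance, with the `format` above a daily bucket that would otherwise be keyed by an epoch-millisecond timestamp is keyed by a string such as `2017-10-20` in the returned bucket keys (illustrative value, not from the commit).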
@@ -56,7 +56,7 @@ match the query string `"a* b* c*"`.
 [WARNING]
 =======
 Pure wildcards `\*` are rewritten to <<query-dsl-exists-query,`exists`>> queries for efficiency.
-As a consequence, the wildcard `"field:*"` would match documents with an emtpy value
+As a consequence, the wildcard `"field:*"` would match documents with an empty value
 like the following:
 ```
 {
@@ -20,6 +20,7 @@
 package org.elasticsearch.script.mustache;
 
 import com.github.mustachejava.reflect.ReflectionObjectHandler;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.util.iterable.Iterables;
 
 import java.lang.reflect.Array;
@@ -154,4 +155,9 @@ final class CustomReflectionObjectHandler extends ReflectionObjectHandler {
         }
     }
 
+    @Override
+    public String stringify(Object object) {
+        CollectionUtils.ensureNoSelfReferences(object);
+        return super.stringify(object);
+    }
 }
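The `stringify` override is the template-rendering half of the self-reference protection added in this merge (#28335): before a value is rendered into a mustache template, it is checked for cycles. A minimal sketch of the situation it rejects (standalone, hypothetical usage):

["source","java"]
--------------------------------------------------
Map<String, Object> params = new HashMap<>();
params.put("self", params); // the map now contains itself

// Rendering such a value used to recurse without bound; with this change,
// ensureNoSelfReferences detects the cycle and throws an IllegalArgumentException.
CollectionUtils.ensureNoSelfReferences(params);
--------------------------------------------------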
File diff suppressed because it is too large.
@@ -27,6 +27,7 @@ import java.lang.invoke.MethodHandles;
 import java.lang.invoke.MethodType;
 import java.lang.reflect.Modifier;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -76,6 +77,13 @@ public final class Definition {
     public final Type ArrayListType;
     public final Type HashMapType;
 
+    /** Marker class for def type to be used during type analysis. */
+    public static final class def {
+        private def() {
+
+        }
+    }
+
     public static final class Type {
         public final String name;
         public final int dimensions;
@@ -365,40 +373,41 @@ public final class Definition {
     }
 
     public static class Cast {
 
         /** Create a standard cast with no boxing/unboxing. */
-        public static Cast standard(Type from, Type to, boolean explicit) {
+        public static Cast standard(Class<?> from, Class<?> to, boolean explicit) {
             return new Cast(from, to, explicit, null, null, null, null);
         }
 
         /** Create a cast where the from type will be unboxed, and then the cast will be performed. */
-        public static Cast unboxFrom(Type from, Type to, boolean explicit, Type unboxFrom) {
+        public static Cast unboxFrom(Class<?> from, Class<?> to, boolean explicit, Class<?> unboxFrom) {
            return new Cast(from, to, explicit, unboxFrom, null, null, null);
         }
 
         /** Create a cast where the to type will be unboxed, and then the cast will be performed. */
-        public static Cast unboxTo(Type from, Type to, boolean explicit, Type unboxTo) {
+        public static Cast unboxTo(Class<?> from, Class<?> to, boolean explicit, Class<?> unboxTo) {
             return new Cast(from, to, explicit, null, unboxTo, null, null);
         }
 
         /** Create a cast where the from type will be boxed, and then the cast will be performed. */
-        public static Cast boxFrom(Type from, Type to, boolean explicit, Type boxFrom) {
+        public static Cast boxFrom(Class<?> from, Class<?> to, boolean explicit, Class<?> boxFrom) {
             return new Cast(from, to, explicit, null, null, boxFrom, null);
         }
 
         /** Create a cast where the to type will be boxed, and then the cast will be performed. */
-        public static Cast boxTo(Type from, Type to, boolean explicit, Type boxTo) {
+        public static Cast boxTo(Class<?> from, Class<?> to, boolean explicit, Class<?> boxTo) {
             return new Cast(from, to, explicit, null, null, null, boxTo);
         }
 
-        public final Type from;
-        public final Type to;
+        public final Class<?> from;
+        public final Class<?> to;
         public final boolean explicit;
-        public final Type unboxFrom;
-        public final Type unboxTo;
-        public final Type boxFrom;
-        public final Type boxTo;
+        public final Class<?> unboxFrom;
+        public final Class<?> unboxTo;
+        public final Class<?> boxFrom;
+        public final Class<?> boxTo;
 
-        private Cast(Type from, Type to, boolean explicit, Type unboxFrom, Type unboxTo, Type boxFrom, Type boxTo) {
+        private Cast(Class<?> from, Class<?> to, boolean explicit, Class<?> unboxFrom, Class<?> unboxTo, Class<?> boxFrom, Class<?> boxTo) {
             this.from = from;
             this.to = to;
             this.explicit = explicit;
@@ -499,6 +508,124 @@ public final class Definition {
                constant.clazz == String.class;
     }
 
+    public static Class<?> ObjectClassTodefClass(Class<?> clazz) {
+        if (clazz.isArray()) {
+            Class<?> component = clazz.getComponentType();
+            int dimensions = 1;
+
+            while (component.isArray()) {
+                component = component.getComponentType();
+                ++dimensions;
+            }
+
+            if (component == Object.class) {
+                char[] braces = new char[dimensions];
+                Arrays.fill(braces, '[');
+
+                String descriptor = new String(braces) + org.objectweb.asm.Type.getType(def.class).getDescriptor();
+                org.objectweb.asm.Type type = org.objectweb.asm.Type.getType(descriptor);
+
+                try {
+                    return Class.forName(type.getInternalName().replace('/', '.'));
+                } catch (ClassNotFoundException exception) {
+                    throw new IllegalStateException("internal error", exception);
+                }
+            }
+        } else if (clazz == Object.class) {
+            return def.class;
+        }
+
+        return clazz;
+    }
+
+    public static Class<?> defClassToObjectClass(Class<?> clazz) {
+        if (clazz.isArray()) {
+            Class<?> component = clazz.getComponentType();
+            int dimensions = 1;
+
+            while (component.isArray()) {
+                component = component.getComponentType();
+                ++dimensions;
+            }
+
+            if (component == def.class) {
+                char[] braces = new char[dimensions];
+                Arrays.fill(braces, '[');
+
+                String descriptor = new String(braces) + org.objectweb.asm.Type.getType(Object.class).getDescriptor();
+                org.objectweb.asm.Type type = org.objectweb.asm.Type.getType(descriptor);
+
+                try {
+                    return Class.forName(type.getInternalName().replace('/', '.'));
+                } catch (ClassNotFoundException exception) {
+                    throw new IllegalStateException("internal error", exception);
+                }
+            }
+        } else if (clazz == def.class) {
+            return Object.class;
+        }
+
+        return clazz;
+    }
+
+    public static String ClassToName(Class<?> clazz) {
+        if (clazz.isArray()) {
+            Class<?> component = clazz.getComponentType();
+            int dimensions = 1;
+
+            while (component.isArray()) {
+                component = component.getComponentType();
+                ++dimensions;
+            }
+
+            if (component == def.class) {
+                StringBuilder builder = new StringBuilder("def");
+
+                for (int dimension = 0; dimension < dimensions; ++dimension) {
+                    builder.append("[]");
+                }
+
+                return builder.toString();
+            }
+        } else if (clazz == def.class) {
+            return "def";
+        }
+
+        return clazz.getCanonicalName().replace('$', '.');
+    }
+
+    public Type ClassToType(Class<?> clazz) {
+        if (clazz == null) {
+            return null;
+        } else if (clazz.isArray()) {
+            Class<?> component = clazz.getComponentType();
+            int dimensions = 1;
+
+            while (component.isArray()) {
+                component = component.getComponentType();
+                ++dimensions;
+            }
+
+            if (component == def.class) {
+                return getType(structsMap.get("def"), dimensions);
+            } else {
+                return getType(runtimeMap.get(component).struct, dimensions);
+            }
+        } else if (clazz == def.class) {
+            return getType(structsMap.get("def"), 0);
+        }
+
+        return getType(structsMap.get(ClassToName(clazz)), 0);
+    }
+
+    public static Class<?> TypeToClass(Type type) {
+        if (type.dynamic) {
+            return ObjectClassTodefClass(type.clazz);
+        }
+
+        return type.clazz;
+    }
+
+    public RuntimeClass getRuntimeClass(Class<?> clazz) {
+        return runtimeMap.get(clazz);
+    }
+
@@ -536,8 +663,6 @@ public final class Definition {
     private final Map<String, Struct> structsMap;
     private final Map<String, Type> simpleTypesMap;
 
-    public AnalyzerCaster caster;
-
     public Definition(List<Whitelist> whitelists) {
         structsMap = new HashMap<>();
         simpleTypesMap = new HashMap<>();
@@ -719,8 +844,6 @@ public final class Definition {
         IteratorType = getType("Iterator");
         ArrayListType = getType("ArrayList");
         HashMapType = getType("HashMap");
-
-        caster = new AnalyzerCaster(this);
     }
 
     private void addStruct(ClassLoader whitelistClassLoader, Whitelist.Struct whitelistStruct) {
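The conversion helpers above rebuild array classes around the `def` marker by composing a JVM array descriptor (one `[` per dimension plus the element descriptor) and resolving it with `Class.forName`. The same mechanics in isolation (a sketch; `Object` stands in for the internal `def` class):

["source","java"]
--------------------------------------------------
// "[[Ljava.lang.Object;" is the binary name of Object[][]:
// one '[' per array dimension, then the element type descriptor.
Class<?> arrayClass = Class.forName("[[Ljava.lang.Object;"); // throws ClassNotFoundException
assert arrayClass == Object[][].class;
--------------------------------------------------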
@@ -20,15 +20,17 @@
 
 package org.elasticsearch.painless;
 
 import org.elasticsearch.painless.Definition.Cast;
-import org.elasticsearch.painless.Definition.Type;
+import org.elasticsearch.painless.Definition.def;
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.Label;
 import org.objectweb.asm.Opcodes;
+import org.objectweb.asm.Type;
 import org.objectweb.asm.commons.GeneratorAdapter;
 import org.objectweb.asm.commons.Method;
 
 import java.util.ArrayDeque;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.BitSet;
 import java.util.Deque;
 import java.util.List;
@@ -128,68 +130,68 @@ public final class MethodWriter extends GeneratorAdapter {
         mark(end);
     }
 
-    public void writeCast(final Cast cast) {
+    public void writeCast(Cast cast) {
         if (cast != null) {
-            if (cast.from.clazz == char.class && cast.to.clazz == String.class) {
+            if (cast.from == char.class && cast.to == String.class) {
                 invokeStatic(UTILITY_TYPE, CHAR_TO_STRING);
-            } else if (cast.from.clazz == String.class && cast.to.clazz == char.class) {
+            } else if (cast.from == String.class && cast.to == char.class) {
                 invokeStatic(UTILITY_TYPE, STRING_TO_CHAR);
             } else if (cast.unboxFrom != null) {
-                unbox(cast.unboxFrom.type);
+                unbox(getType(cast.unboxFrom));
                 writeCast(cast.from, cast.to);
             } else if (cast.unboxTo != null) {
-                if (cast.from.dynamic) {
+                if (cast.from == def.class) {
                     if (cast.explicit) {
-                        if (cast.to.clazz == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN);
-                        else if (cast.to.clazz == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT);
-                        else if (cast.to.clazz == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT);
-                        else if (cast.to.clazz == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT);
-                        else if (cast.to.clazz == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT);
-                        else if (cast.to.clazz == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT);
-                        else if (cast.to.clazz == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT);
-                        else if (cast.to.clazz == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT);
+                        if (cast.to == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN);
+                        else if (cast.to == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_EXPLICIT);
+                        else if (cast.to == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_EXPLICIT);
+                        else if (cast.to == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_EXPLICIT);
+                        else if (cast.to == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_EXPLICIT);
+                        else if (cast.to == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_EXPLICIT);
+                        else if (cast.to == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_EXPLICIT);
+                        else if (cast.to == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_EXPLICIT);
                         else {
                             throw new IllegalStateException("Illegal tree structure.");
                         }
                     } else {
-                        if (cast.to.clazz == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN);
-                        else if (cast.to.clazz == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT);
-                        else if (cast.to.clazz == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT);
-                        else if (cast.to.clazz == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT);
-                        else if (cast.to.clazz == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT);
-                        else if (cast.to.clazz == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT);
-                        else if (cast.to.clazz == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT);
-                        else if (cast.to.clazz == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT);
+                        if (cast.to == Boolean.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BOOLEAN);
+                        else if (cast.to == Byte.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_BYTE_IMPLICIT);
+                        else if (cast.to == Short.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_SHORT_IMPLICIT);
+                        else if (cast.to == Character.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_CHAR_IMPLICIT);
+                        else if (cast.to == Integer.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_INT_IMPLICIT);
+                        else if (cast.to == Long.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_LONG_IMPLICIT);
+                        else if (cast.to == Float.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_FLOAT_IMPLICIT);
+                        else if (cast.to == Double.class) invokeStatic(DEF_UTIL_TYPE, DEF_TO_DOUBLE_IMPLICIT);
                         else {
                             throw new IllegalStateException("Illegal tree structure.");
                         }
                     }
                 } else {
                     writeCast(cast.from, cast.to);
-                    unbox(cast.unboxTo.type);
+                    unbox(getType(cast.unboxTo));
                 }
             } else if (cast.boxFrom != null) {
-                box(cast.boxFrom.type);
+                box(getType(cast.boxFrom));
                 writeCast(cast.from, cast.to);
             } else if (cast.boxTo != null) {
                 writeCast(cast.from, cast.to);
-                box(cast.boxTo.type);
+                box(getType(cast.boxTo));
             } else {
                 writeCast(cast.from, cast.to);
             }
         }
     }
 
-    private void writeCast(final Type from, final Type to) {
+    private void writeCast(Class<?> from, Class<?> to) {
         if (from.equals(to)) {
             return;
         }
 
-        if (from.clazz != boolean.class && from.clazz.isPrimitive() && to.clazz != boolean.class && to.clazz.isPrimitive()) {
-            cast(from.type, to.type);
+        if (from != boolean.class && from.isPrimitive() && to != boolean.class && to.isPrimitive()) {
+            cast(getType(from), getType(to));
         } else {
-            if (!to.clazz.isAssignableFrom(from.clazz)) {
-                checkCast(to.type);
+            if (!to.isAssignableFrom(from)) {
+                checkCast(getType(to));
             }
         }
     }
@@ -202,6 +204,29 @@ public final class MethodWriter extends GeneratorAdapter {
         valueOf(type);
     }
 
+    public static Type getType(Class<?> clazz) {
+        if (clazz.isArray()) {
+            Class<?> component = clazz.getComponentType();
+            int dimensions = 1;
+
+            while (component.isArray()) {
+                component = component.getComponentType();
+                ++dimensions;
+            }
+
+            if (component == def.class) {
+                char[] braces = new char[dimensions];
+                Arrays.fill(braces, '[');
+
+                return Type.getType(new String(braces) + Type.getType(Object.class).getDescriptor());
+            }
+        } else if (clazz == def.class) {
+            return Type.getType(Object.class);
+        }
+
+        return Type.getType(clazz);
+    }
+
     public void writeBranch(final Label tru, final Label fals) {
         if (tru != null) {
             visitJumpInsn(Opcodes.IFNE, tru);
@@ -227,7 +252,7 @@ public final class MethodWriter extends GeneratorAdapter {
         }
     }
 
-    public void writeAppendStrings(final Type type) {
+    public void writeAppendStrings(final Definition.Type type) {
         if (INDY_STRING_CONCAT_BOOTSTRAP_HANDLE != null) {
             // Java 9+: record type information
             stringConcatArgs.peek().add(type.type);
@@ -267,7 +292,7 @@ public final class MethodWriter extends GeneratorAdapter {
     }
 
     /** Writes a dynamic binary instruction: returnType, lhs, and rhs can be different */
-    public void writeDynamicBinaryInstruction(Location location, Type returnType, Type lhs, Type rhs,
+    public void writeDynamicBinaryInstruction(Location location, Definition.Type returnType, Definition.Type lhs, Definition.Type rhs,
                                               Operation operation, int flags) {
         org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType(returnType.type, lhs.type, rhs.type);
 
@@ -318,7 +343,7 @@ public final class MethodWriter extends GeneratorAdapter {
     }
 
     /** Writes a static binary instruction */
-    public void writeBinaryInstruction(Location location, Type type, Operation operation) {
+    public void writeBinaryInstruction(Location location, Definition.Type type, Operation operation) {
         if ((type.clazz == float.class || type.clazz == double.class) &&
                 (operation == Operation.LSH || operation == Operation.USH ||
                  operation == Operation.RSH || operation == Operation.BWAND ||
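At the bytecode level `def` is erased to `Object`, which is what the new static `getType` encodes, including inside array types. A sketch of the expected mapping (assuming access to the classes from the diffs above):

["source","java"]
--------------------------------------------------
// def erases to Object; a def[] would likewise erase to [Ljava/lang/Object;.
org.objectweb.asm.Type asmType = MethodWriter.getType(Definition.def.class);
assert asmType.getDescriptor().equals("Ljava/lang/Object;");
--------------------------------------------------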
@@ -119,7 +119,8 @@ public abstract class AExpression extends ANode {
      * @return The new child node for the parent node calling this method.
      */
     AExpression cast(Locals locals) {
-        Cast cast = locals.getDefinition().caster.getLegalCast(location, actual, expected, explicit, internal);
+        Cast cast =
+            AnalyzerCaster.getLegalCast(location, Definition.TypeToClass(actual), Definition.TypeToClass(expected), explicit, internal);
 
         if (cast == null) {
             if (constant == null || this instanceof EConstant) {
@@ -167,7 +168,7 @@ public abstract class AExpression extends ANode {
                 // from this node because the output data for the EConstant
                 // will already be the same.
 
-                constant = locals.getDefinition().caster.constCast(location, constant, cast);
+                constant = AnalyzerCaster.constCast(location, constant, cast);
 
                 EConstant econstant = new EConstant(location, constant);
                 econstant.analyze(locals);
@@ -19,7 +19,10 @@
 
 package org.elasticsearch.painless.node;
 
+import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
+import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Cast;
 import org.elasticsearch.painless.Definition.Type;
 import org.elasticsearch.painless.Globals;
@@ -139,33 +142,41 @@ public final class EAssignment extends AExpression {
         boolean shift = false;
 
         if (operation == Operation.MUL) {
-            promote = locals.getDefinition().caster.promoteNumeric(lhs.actual, rhs.actual, true);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteNumeric(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual), true));
         } else if (operation == Operation.DIV) {
-            promote = locals.getDefinition().caster.promoteNumeric(lhs.actual, rhs.actual, true);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteNumeric(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual), true));
         } else if (operation == Operation.REM) {
-            promote = locals.getDefinition().caster.promoteNumeric(lhs.actual, rhs.actual, true);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteNumeric(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual), true));
        } else if (operation == Operation.ADD) {
-            promote = locals.getDefinition().caster.promoteAdd(lhs.actual, rhs.actual);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteAdd(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual)));
         } else if (operation == Operation.SUB) {
-            promote = locals.getDefinition().caster.promoteNumeric(lhs.actual, rhs.actual, true);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteNumeric(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual), true));
         } else if (operation == Operation.LSH) {
-            promote = locals.getDefinition().caster.promoteNumeric(lhs.actual, false);
-            shiftDistance = locals.getDefinition().caster.promoteNumeric(rhs.actual, false);
+            promote = locals.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(lhs.actual), false));
+            shiftDistance = locals.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(rhs.actual), false));
             shift = true;
         } else if (operation == Operation.RSH) {
-            promote = locals.getDefinition().caster.promoteNumeric(lhs.actual, false);
-            shiftDistance = locals.getDefinition().caster.promoteNumeric(rhs.actual, false);
+            promote = locals.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(lhs.actual), false));
+            shiftDistance = locals.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(rhs.actual), false));
             shift = true;
         } else if (operation == Operation.USH) {
-            promote = locals.getDefinition().caster.promoteNumeric(lhs.actual, false);
-            shiftDistance = locals.getDefinition().caster.promoteNumeric(rhs.actual, false);
+            promote = locals.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(lhs.actual), false));
+            shiftDistance = locals.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(rhs.actual), false));
             shift = true;
         } else if (operation == Operation.BWAND) {
-            promote = locals.getDefinition().caster.promoteXor(lhs.actual, rhs.actual);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteXor(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual)));
        } else if (operation == Operation.XOR) {
-            promote = locals.getDefinition().caster.promoteXor(lhs.actual, rhs.actual);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteXor(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual)));
        } else if (operation == Operation.BWOR) {
-            promote = locals.getDefinition().caster.promoteXor(lhs.actual, rhs.actual);
+            promote = locals.getDefinition().ClassToType(
+                AnalyzerCaster.promoteXor(Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual)));
         } else {
             throw createError(new IllegalStateException("Illegal tree structure."));
         }
@@ -199,8 +210,8 @@ public final class EAssignment extends AExpression {
 
         rhs = rhs.cast(locals);
 
-        there = locals.getDefinition().caster.getLegalCast(location, lhs.actual, promote, false, false);
-        back = locals.getDefinition().caster.getLegalCast(location, promote, lhs.actual, true, false);
+        there = AnalyzerCaster.getLegalCast(location, Definition.TypeToClass(lhs.actual), Definition.TypeToClass(promote), false, false);
+        back = AnalyzerCaster.getLegalCast(location, Definition.TypeToClass(promote), Definition.TypeToClass(lhs.actual), true, false);
 
         this.statement = true;
         this.actual = read ? lhs.actual : locals.getDefinition().voidType;
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.painless.node;
 
+import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Type;
@@ -101,7 +102,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply multiply [*] to types " +
@@ -145,7 +147,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply divide [/] to types " +
@@ -194,7 +197,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply remainder [%] to types " +
@@ -243,7 +247,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteAdd(left.actual, right.actual);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteAdd(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual)));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply add [+] to types " +
@@ -303,7 +308,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply subtract [-] to types " +
@@ -362,8 +368,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        Type lhspromote = variables.getDefinition().caster.promoteNumeric(left.actual, false);
-        Type rhspromote = variables.getDefinition().caster.promoteNumeric(right.actual, false);
+        Type lhspromote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), false));
+        Type rhspromote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(right.actual), false));
 
         if (lhspromote == null || rhspromote == null) {
             throw createError(new ClassCastException("Cannot apply left shift [<<] to types " +
@@ -411,8 +417,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        Type lhspromote = variables.getDefinition().caster.promoteNumeric(left.actual, false);
-        Type rhspromote = variables.getDefinition().caster.promoteNumeric(right.actual, false);
+        Type lhspromote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), false));
+        Type rhspromote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(right.actual), false));
 
         if (lhspromote == null || rhspromote == null) {
             throw createError(new ClassCastException("Cannot apply right shift [>>] to types " +
@@ -460,8 +466,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        Type lhspromote = variables.getDefinition().caster.promoteNumeric(left.actual, false);
-        Type rhspromote = variables.getDefinition().caster.promoteNumeric(right.actual, false);
+        Type lhspromote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), false));
+        Type rhspromote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(right.actual), false));
 
         actual = promote = lhspromote;
         shiftDistance = rhspromote;
@@ -509,7 +515,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, false);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), false));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply and [&] to types " +
@@ -550,7 +557,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteXor(left.actual, right.actual);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteXor(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual)));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply xor [^] to types " +
@@ -592,7 +600,8 @@ public final class EBinary extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promote = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, false);
+        promote = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), false));
 
         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply or [|] to types " +
@@ -82,11 +82,12 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda
             for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) {
                 Definition.Type from = ref.interfaceMethod.arguments.get(i);
                 Definition.Type to = ref.delegateMethod.arguments.get(i);
-                locals.getDefinition().caster.getLegalCast(location, from, to, false, true);
+                AnalyzerCaster.getLegalCast(location, Definition.TypeToClass(from), Definition.TypeToClass(to), false, true);
             }
 
             if (ref.interfaceMethod.rtn.equals(locals.getDefinition().voidType) == false) {
-                locals.getDefinition().caster.getLegalCast(location, ref.delegateMethod.rtn, ref.interfaceMethod.rtn, false, true);
+                AnalyzerCaster.getLegalCast(location,
+                    Definition.TypeToClass(ref.delegateMethod.rtn), Definition.TypeToClass(ref.interfaceMethod.rtn), false, true);
             }
         } catch (IllegalArgumentException e) {
             throw createError(e);
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.painless.node;
 
+import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Cast;
 
 import java.util.Objects;
@@ -63,6 +64,6 @@ final class ECast extends AExpression {
 
     @Override
     public String toString() {
-        return singleLineToString(cast.to, child);
+        return singleLineToString(Definition.ClassToName(cast.to), child);
     }
 }
@@ -89,7 +89,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteEquality(left.actual, right.actual);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteEquality(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual)));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply equals [==] to types " +
@@ -140,7 +141,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteEquality(left.actual, right.actual);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteEquality(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual)));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply reference equals [===] to types " +
@@ -182,7 +184,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteEquality(left.actual, right.actual);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteEquality(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual)));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply not equals [!=] to types " +
@@ -233,7 +236,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteEquality(left.actual, right.actual);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteEquality(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual)));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " +
@@ -275,7 +279,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " +
@@ -316,7 +321,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply greater than [>] to types " +
@@ -357,7 +363,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " +
@@ -398,7 +405,8 @@ public final class EComp extends AExpression {
         left.analyze(variables);
         right.analyze(variables);
 
-        promotedType = variables.getDefinition().caster.promoteNumeric(left.actual, right.actual, true);
+        promotedType = variables.getDefinition().ClassToType(
+            AnalyzerCaster.promoteNumeric(Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), true));
 
         if (promotedType == null) {
             throw createError(new ClassCastException("Cannot apply less than [<] to types " +
@@ -79,7 +79,8 @@ public final class EConditional extends AExpression {
         right.analyze(locals);
 
         if (expected == null) {
-            final Type promote = locals.getDefinition().caster.promoteConditional(left.actual, right.actual, left.constant, right.constant);
+            Type promote = locals.getDefinition().ClassToType(AnalyzerCaster.promoteConditional(
+                Definition.TypeToClass(left.actual), Definition.TypeToClass(right.actual), left.constant, right.constant));
 
             left.expected = promote;
             right.expected = promote;
@@ -20,6 +20,7 @@
 package org.elasticsearch.painless.node;

+import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Type;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;

@@ -81,7 +82,8 @@ public class EElvis extends AExpression {
         }

         if (expected == null) {
-            final Type promote = locals.getDefinition().caster.promoteConditional(lhs.actual, rhs.actual, lhs.constant, rhs.constant);
+            Type promote = locals.getDefinition().ClassToType(AnalyzerCaster.promoteConditional(
+                Definition.TypeToClass(lhs.actual), Definition.TypeToClass(rhs.actual), lhs.constant, rhs.constant));

             lhs.expected = promote;
             rhs.expected = promote;
@@ -82,11 +82,12 @@ public final class EFunctionRef extends AExpression implements ILambda {
             for (int i = 0; i < interfaceMethod.arguments.size(); ++i) {
                 Definition.Type from = interfaceMethod.arguments.get(i);
                 Definition.Type to = delegateMethod.arguments.get(i);
-                locals.getDefinition().caster.getLegalCast(location, from, to, false, true);
+                AnalyzerCaster.getLegalCast(location, Definition.TypeToClass(from), Definition.TypeToClass(to), false, true);
             }

             if (interfaceMethod.rtn.equals(locals.getDefinition().voidType) == false) {
-                locals.getDefinition().caster.getLegalCast(location, delegateMethod.rtn, interfaceMethod.rtn, false, true);
+                AnalyzerCaster.getLegalCast(
+                    location, Definition.TypeToClass(delegateMethod.rtn), Definition.TypeToClass(interfaceMethod.rtn), false, true);
             }
         } else {
             // whitelist lookup
@@ -192,11 +192,12 @@ public final class ELambda extends AExpression implements ILambda {
             for (int i = 0; i < interfaceMethod.arguments.size(); ++i) {
                 Type from = interfaceMethod.arguments.get(i);
                 Type to = desugared.parameters.get(i + captures.size()).type;
-                locals.getDefinition().caster.getLegalCast(location, from, to, false, true);
+                AnalyzerCaster.getLegalCast(location, Definition.TypeToClass(from), Definition.TypeToClass(to), false, true);
             }

             if (interfaceMethod.rtn.equals(locals.getDefinition().voidType) == false) {
-                locals.getDefinition().caster.getLegalCast(location, desugared.rtnType, interfaceMethod.rtn, false, true);
+                AnalyzerCaster.getLegalCast(
+                    location, Definition.TypeToClass(desugared.rtnType), Definition.TypeToClass(interfaceMethod.rtn), false, true);
             }

             actual = expected;
@@ -90,7 +90,7 @@ public final class EUnary extends AExpression {
     void analyzeBWNot(Locals variables) {
         child.analyze(variables);

-        promote = variables.getDefinition().caster.promoteNumeric(child.actual, false);
+        promote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(child.actual), false));

         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply not [~] to type [" + child.actual.name + "]."));

@@ -121,7 +121,7 @@ public final class EUnary extends AExpression {
     void analyzerAdd(Locals variables) {
         child.analyze(variables);

-        promote = variables.getDefinition().caster.promoteNumeric(child.actual, true);
+        promote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(child.actual), true));

         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply positive [+] to type [" + child.actual.name + "]."));

@@ -156,7 +156,7 @@ public final class EUnary extends AExpression {
     void analyzerSub(Locals variables) {
         child.analyze(variables);

-        promote = variables.getDefinition().caster.promoteNumeric(child.actual, true);
+        promote = variables.getDefinition().ClassToType(AnalyzerCaster.promoteNumeric(Definition.TypeToClass(child.actual), true));

         if (promote == null) {
             throw createError(new ClassCastException("Cannot apply negative [-] to type [" + child.actual.name + "]."));
@@ -64,13 +64,10 @@ final class SSubEachArray extends AStatement {
     void analyze(Locals locals) {
         // We must store the array and index as variables for securing slots on the stack, and
         // also add the location offset to make the names unique in case of nested for each loops.
-        array = locals.addVariable(location, expression.actual, "#array" + location.getOffset(),
-            true);
-        index = locals.addVariable(location, locals.getDefinition().intType, "#index" + location.getOffset(),
-            true);
-        indexed = locals.getDefinition().getType(expression.actual.struct,
-            expression.actual.dimensions - 1);
-        cast = locals.getDefinition().caster.getLegalCast(location, indexed, variable.type, true, true);
+        array = locals.addVariable(location, expression.actual, "#array" + location.getOffset(), true);
+        index = locals.addVariable(location, locals.getDefinition().intType, "#index" + location.getOffset(), true);
+        indexed = locals.getDefinition().getType(expression.actual.struct, expression.actual.dimensions - 1);
+        cast = AnalyzerCaster.getLegalCast(location, Definition.TypeToClass(indexed), Definition.TypeToClass(variable.type), true, true);
     }

     @Override
@@ -25,6 +25,7 @@ import org.elasticsearch.painless.Definition;
 import org.elasticsearch.painless.Definition.Cast;
 import org.elasticsearch.painless.Definition.Method;
 import org.elasticsearch.painless.Definition.MethodKey;
+import org.elasticsearch.painless.Definition.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Locals.Variable;

@@ -85,7 +86,7 @@ final class SSubEachIterable extends AStatement {
             }
         }

-        cast = locals.getDefinition().caster.getLegalCast(location, locals.getDefinition().DefType, variable.type, true, true);
+        cast = AnalyzerCaster.getLegalCast(location, def.class, Definition.TypeToClass(variable.type), true, true);
     }

     @Override
@@ -20,7 +20,6 @@
 package org.elasticsearch.painless;

 import org.elasticsearch.painless.Definition.Cast;
-import org.elasticsearch.painless.Definition.Type;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.test.ESTestCase;

@@ -28,73 +27,73 @@ public class AnalyzerCasterTests extends ESTestCase {

     private static final Definition definition = new Definition(Whitelist.BASE_WHITELISTS);

-    private static void assertCast(Type actual, Type expected, boolean mustBeExplicit) {
+    private static void assertCast(Class<?> actual, Class<?> expected, boolean mustBeExplicit) {
         Location location = new Location("dummy", 0);

         if (actual.equals(expected)) {
             assertFalse(mustBeExplicit);
-            assertNull(definition.caster.getLegalCast(location, actual, expected, false, false));
-            assertNull(definition.caster.getLegalCast(location, actual, expected, true, false));
+            assertNull(AnalyzerCaster.getLegalCast(location, actual, expected, false, false));
+            assertNull(AnalyzerCaster.getLegalCast(location, actual, expected, true, false));
             return;
         }

-        Cast cast = definition.caster.getLegalCast(location, actual, expected, true, false);
+        Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, true, false);
         assertEquals(actual, cast.from);
         assertEquals(expected, cast.to);

         if (mustBeExplicit) {
             ClassCastException error = expectThrows(ClassCastException.class,
-                () -> definition.caster.getLegalCast(location, actual, expected, false, false));
+                () -> AnalyzerCaster.getLegalCast(location, actual, expected, false, false));
             assertTrue(error.getMessage().startsWith("Cannot cast"));
         } else {
-            cast = definition.caster.getLegalCast(location, actual, expected, false, false);
+            cast = AnalyzerCaster.getLegalCast(location, actual, expected, false, false);
             assertEquals(actual, cast.from);
             assertEquals(expected, cast.to);
         }
     }

     public void testNumericCasts() {
-        assertCast(definition.byteType, definition.byteType, false);
-        assertCast(definition.byteType, definition.shortType, false);
-        assertCast(definition.byteType, definition.intType, false);
-        assertCast(definition.byteType, definition.longType, false);
-        assertCast(definition.byteType, definition.floatType, false);
-        assertCast(definition.byteType, definition.doubleType, false);
+        assertCast(byte.class, byte.class, false);
+        assertCast(byte.class, short.class, false);
+        assertCast(byte.class, int.class, false);
+        assertCast(byte.class, long.class, false);
+        assertCast(byte.class, float.class, false);
+        assertCast(byte.class, double.class, false);

-        assertCast(definition.shortType, definition.byteType, true);
-        assertCast(definition.shortType, definition.shortType, false);
-        assertCast(definition.shortType, definition.intType, false);
-        assertCast(definition.shortType, definition.longType, false);
-        assertCast(definition.shortType, definition.floatType, false);
-        assertCast(definition.shortType, definition.doubleType, false);
+        assertCast(short.class, byte.class, true);
+        assertCast(short.class, short.class, false);
+        assertCast(short.class, int.class, false);
+        assertCast(short.class, long.class, false);
+        assertCast(short.class, float.class, false);
+        assertCast(short.class, double.class, false);

-        assertCast(definition.intType, definition.byteType, true);
-        assertCast(definition.intType, definition.shortType, true);
-        assertCast(definition.intType, definition.intType, false);
-        assertCast(definition.intType, definition.longType, false);
-        assertCast(definition.intType, definition.floatType, false);
-        assertCast(definition.intType, definition.doubleType, false);
+        assertCast(int.class, byte.class, true);
+        assertCast(int.class, short.class, true);
+        assertCast(int.class, int.class, false);
+        assertCast(int.class, long.class, false);
+        assertCast(int.class, float.class, false);
+        assertCast(int.class, double.class, false);

-        assertCast(definition.longType, definition.byteType, true);
-        assertCast(definition.longType, definition.shortType, true);
-        assertCast(definition.longType, definition.intType, true);
-        assertCast(definition.longType, definition.longType, false);
-        assertCast(definition.longType, definition.floatType, false);
-        assertCast(definition.longType, definition.doubleType, false);
+        assertCast(long.class, byte.class, true);
+        assertCast(long.class, short.class, true);
+        assertCast(long.class, int.class, true);
+        assertCast(long.class, long.class, false);
+        assertCast(long.class, float.class, false);
+        assertCast(long.class, double.class, false);

-        assertCast(definition.floatType, definition.byteType, true);
-        assertCast(definition.floatType, definition.shortType, true);
-        assertCast(definition.floatType, definition.intType, true);
-        assertCast(definition.floatType, definition.longType, true);
-        assertCast(definition.floatType, definition.floatType, false);
-        assertCast(definition.floatType, definition.doubleType, false);
+        assertCast(float.class, byte.class, true);
+        assertCast(float.class, short.class, true);
+        assertCast(float.class, int.class, true);
+        assertCast(float.class, long.class, true);
+        assertCast(float.class, float.class, false);
+        assertCast(float.class, double.class, false);

-        assertCast(definition.doubleType, definition.byteType, true);
-        assertCast(definition.doubleType, definition.shortType, true);
-        assertCast(definition.doubleType, definition.intType, true);
-        assertCast(definition.doubleType, definition.longType, true);
-        assertCast(definition.doubleType, definition.floatType, true);
-        assertCast(definition.doubleType, definition.doubleType, false);
+        assertCast(double.class, byte.class, true);
+        assertCast(double.class, short.class, true);
+        assertCast(double.class, int.class, true);
+        assertCast(double.class, long.class, true);
+        assertCast(double.class, float.class, true);
+        assertCast(double.class, double.class, false);
     }

 }
@@ -162,12 +162,12 @@ public class NodeToStringTests extends ESTestCase {
     public void testECast() {
         Location l = new Location(getTestName(), 0);
         AExpression child = new EConstant(l, "test");
-        Cast cast = Cast.standard(definition.StringType, definition.IntegerType, true);
+        Cast cast = Cast.standard(String.class, Integer.class, true);
         assertEquals("(ECast java.lang.Integer (EConstant String 'test'))", new ECast(l, child, cast).toString());

         l = new Location(getTestName(), 1);
         child = new EBinary(l, Operation.ADD, new EConstant(l, "test"), new EConstant(l, 12));
-        cast = Cast.standard(definition.IntegerType, definition.BooleanType, true);
+        cast = Cast.standard(Integer.class, Boolean.class, true);
         assertEquals("(ECast java.lang.Boolean (EBinary (EConstant String 'test') + (EConstant Integer 12)))",
             new ECast(l, child, cast).toString());
     }
@@ -137,4 +137,4 @@

   - match: { error.root_cause.0.type: "remote_transport_exception" }
   - match: { error.type: "illegal_argument_exception" }
-  - match: { error.reason: "Object has already been built and is self-referencing itself" }
+  - match: { error.reason: "Iterable object is self-referencing itself" }
@@ -406,3 +406,39 @@
   - match: { hits.hits.0._score: 1.0 }
   - match: { aggregations.value_agg.buckets.0.key: 2 }
   - match: { aggregations.value_agg.buckets.0.doc_count: 1 }
+
+---
+"Return self-referencing map":
+  - do:
+      indices.create:
+        index: test
+        body:
+          settings:
+            number_of_shards: "1"
+
+  - do:
+      index:
+        index: test
+        type: test
+        id: 1
+        body: { "genre": 1 }
+
+  - do:
+      indices.refresh: {}
+
+  - do:
+      catch: bad_request
+      index: test
+      search:
+        body:
+          aggs:
+            genre:
+              terms:
+                script:
+                  lang: painless
+                  source: "def x = [:] ; def y = [:] ; x.a = y ; y.a = x ; return x"
+
+  - match: { error.root_cause.0.type: "illegal_argument_exception" }
+  - match: { error.root_cause.0.reason: "Iterable object is self-referencing itself" }
+  - match: { error.type: "search_phase_execution_exception" }
+  - match: { error.reason: "all shards failed" }
@@ -91,6 +91,7 @@ public class CancelTests extends ReindexTestCase {
         int numDocs = getNumShards(INDEX).numPrimaries * 10 * builder.request().getSlices();
         ALLOWED_OPERATIONS.release(numDocs);

+        logger.debug("setting up [{}] docs", numDocs);
         indexRandom(true, false, true, IntStream.range(0, numDocs)
             .mapToObj(i -> client().prepareIndex(INDEX, TYPE, String.valueOf(i)).setSource("n", i))
             .collect(Collectors.toList()));

@@ -102,16 +103,21 @@ public class CancelTests extends ReindexTestCase {
         // Scroll by 1 so that cancellation is easier to control
         builder.source().setSize(1);

-        /* Allow a random number of the documents less the number of workers to be modified by the reindex action. That way at least one
-         * worker is blocked. */
+        /* Allow a random number of the documents less the number of workers
+         * to be modified by the reindex action. That way at least one worker
+         * is blocked. */
         int numModifiedDocs = randomIntBetween(builder.request().getSlices() * 2, numDocs);
+        logger.debug("chose to modify [{}] docs", numModifiedDocs);
         ALLOWED_OPERATIONS.release(numModifiedDocs - builder.request().getSlices());

         // Now execute the reindex action...
         ActionFuture<? extends BulkByScrollResponse> future = builder.execute();

-        /* ... and waits for the indexing operation listeners to block. It is important to realize that some of the workers might have
-         * exhausted their slice while others might have quite a bit left to work on. We can't control that. */
+        /* ... and wait for the indexing operation listeners to block. It
+         * is important to realize that some of the workers might have
+         * exhausted their slice while others might have quite a bit left
+         * to work on. We can't control that. */
+        logger.debug("waiting for updates to be blocked");
         awaitBusy(() -> ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0);

         // Status should show the task running

@@ -128,15 +134,19 @@ public class CancelTests extends ReindexTestCase {
         cancelTasksResponse.rethrowFailures("Cancel");
         assertThat(cancelTasksResponse.getTasks(), hasSize(1));

-        // The status should now show canceled. The request will still be in the list because it is (or its children are) still blocked.
+        /* The status should now show canceled. The request will still be in the
+         * list because it is (or its children are) still blocked. */
         mainTask = client().admin().cluster().prepareGetTask(mainTask.getTaskId()).get().getTask().getTask();
         status = (BulkByScrollTask.Status) mainTask.getStatus();
+        logger.debug("asserting that parent is marked canceled {}", status);
         assertEquals(CancelTasksRequest.DEFAULT_REASON, status.getReasonCancelled());

         if (builder.request().getSlices() > 1) {
             boolean foundCancelled = false;
             ListTasksResponse sliceList = client().admin().cluster().prepareListTasks().setParentTaskId(mainTask.getTaskId())
                 .setDetailed(true).get();
             sliceList.rethrowFailures("Fetch slice tasks");
+            logger.debug("finding at least one canceled child among {}", sliceList.getTasks());
             for (TaskInfo slice: sliceList.getTasks()) {
                 BulkByScrollTask.Status sliceStatus = (BulkByScrollTask.Status) slice.getStatus();
                 if (sliceStatus.getReasonCancelled() == null) continue;

@@ -146,7 +156,7 @@ public class CancelTests extends ReindexTestCase {
             assertTrue("Didn't find at least one sub task that was cancelled", foundCancelled);
         }

-        // Unblock the last operations
+        logger.debug("unblocking the blocked update");
         ALLOWED_OPERATIONS.release(builder.request().getSlices());

         // Checks that no more operations are executed
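
The test's flow control above is a plain java.util.concurrent.Semaphore: every indexing operation must take a permit, so releasing N permits lets exactly N operations through. A minimal standalone sketch of the same gating pattern (class and variable values are illustrative):

    import java.util.concurrent.Semaphore;

    public class BlockingGateSketch {
        static final Semaphore ALLOWED_OPERATIONS = new Semaphore(0);

        public static void main(String[] args) throws InterruptedException {
            int workers = 2;
            for (int i = 0; i < workers; i++) {
                new Thread(() -> {
                    try {
                        ALLOWED_OPERATIONS.acquire(); // blocks until a permit is released
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt();
                    }
                }).start();
            }
            // Release one permit fewer than there are workers, so at least one
            // worker stays blocked -- mirroring release(numModifiedDocs - slices).
            ALLOWED_OPERATIONS.release(workers - 1);
            // The equivalent of the awaitBusy(...) condition in the test above.
            while (!(ALLOWED_OPERATIONS.hasQueuedThreads() && ALLOWED_OPERATIONS.availablePermits() == 0)) {
                Thread.sleep(10);
            }
            System.out.println("exactly one worker is blocked; the test cancels here");
            ALLOWED_OPERATIONS.release(1); // unblock it, like the final release(slices)
        }
    }
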
@@ -107,6 +107,14 @@ public class ReindexFailureTests extends ReindexTestCase {
                 response.get();
                 logger.info("Didn't trigger a reindex failure on the {} attempt", attempt);
                 attempt++;
+                /*
+                 * In the past we've seen the delete of the source index
+                 * actually take effect *during* the `indexDocs` call in
+                 * the next step. This breaks things pretty disastrously
+                 * so we *try* and wait for the delete to be fully
+                 * complete here.
+                 */
+                assertBusy(() -> assertFalse(client().admin().indices().prepareExists("source").get().isExists()));
             } catch (ExecutionException e) {
                 logger.info("Triggered a reindex failure on the {} attempt: {}", attempt, e.getMessage());
                 assertThat(e.getMessage(),
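
assertBusy retries the assertion until it stops throwing or a timeout elapses. A rough standalone equivalent of that wait, assuming a fixed polling interval (the real ESTestCase helper grows its sleep between attempts):

    import java.util.function.BooleanSupplier;

    static void awaitCondition(BooleanSupplier condition, long timeoutMillis) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMillis;
        while (System.currentTimeMillis() < deadline) {
            if (condition.getAsBoolean()) {
                return; // condition met, e.g. the "source" index no longer exists
            }
            Thread.sleep(100); // fixed interval; illustrative only
        }
        throw new AssertionError("condition not met within " + timeoutMillis + "ms");
    }
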
@@ -73,7 +73,7 @@ class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Se
         logger.debug("creating S3 client with client_name [{}], endpoint [{}]", clientName, clientSettings.endpoint);

         AWSCredentialsProvider credentials = buildCredentials(logger, deprecationLogger, clientSettings, repositorySettings);
-        ClientConfiguration configuration = buildConfiguration(clientSettings, repositorySettings);
+        ClientConfiguration configuration = buildConfiguration(clientSettings);

         client = new AmazonS3Client(credentials, configuration);

@@ -86,7 +86,7 @@ class InternalAwsS3Service extends AbstractLifecycleComponent implements AwsS3Se
     }

     // pkg private for tests
-    static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings, Settings repositorySettings) {
+    static ClientConfiguration buildConfiguration(S3ClientSettings clientSettings) {
         ClientConfiguration clientConfiguration = new ClientConfiguration();
         // the response metadata cache is only there for diagnostics purposes,
         // but can force objects from every response to the old generation.
@@ -95,7 +95,7 @@ public class AwsS3ServiceImplTests extends ESTestCase {
     }

     public void testAWSDefaultConfiguration() {
-        launchAWSConfigurationTest(Settings.EMPTY, Settings.EMPTY, Protocol.HTTPS, null, -1, null, null, 3,
+        launchAWSConfigurationTest(Settings.EMPTY, Protocol.HTTPS, null, -1, null, null, 3,
             ClientConfiguration.DEFAULT_THROTTLE_RETRIES, ClientConfiguration.DEFAULT_SOCKET_TIMEOUT);
     }

@@ -110,7 +110,7 @@ public class AwsS3ServiceImplTests extends ESTestCase {
             .put("s3.client.default.proxy.port", 8080)
             .put("s3.client.default.read_timeout", "10s")
             .build();
-        launchAWSConfigurationTest(settings, Settings.EMPTY, Protocol.HTTP, "aws_proxy_host", 8080, "aws_proxy_username",
+        launchAWSConfigurationTest(settings, Protocol.HTTP, "aws_proxy_host", 8080, "aws_proxy_username",
             "aws_proxy_password", 3, ClientConfiguration.DEFAULT_THROTTLE_RETRIES, 10000);
     }

@@ -118,7 +118,7 @@ public class AwsS3ServiceImplTests extends ESTestCase {
         Settings settings = Settings.builder()
             .put("s3.client.default.max_retries", 5)
             .build();
-        launchAWSConfigurationTest(settings, Settings.EMPTY, Protocol.HTTPS, null, -1, null,
+        launchAWSConfigurationTest(settings, Protocol.HTTPS, null, -1, null,
             null, 5, ClientConfiguration.DEFAULT_THROTTLE_RETRIES, 50000);
     }

@@ -126,22 +126,21 @@ public class AwsS3ServiceImplTests extends ESTestCase {
         final boolean throttling = randomBoolean();

         Settings settings = Settings.builder().put("s3.client.default.use_throttle_retries", throttling).build();
-        launchAWSConfigurationTest(settings, Settings.EMPTY, Protocol.HTTPS, null, -1, null, null, 3, throttling, 50000);
+        launchAWSConfigurationTest(settings, Protocol.HTTPS, null, -1, null, null, 3, throttling, 50000);
     }

     private void launchAWSConfigurationTest(Settings settings,
-                                            Settings singleRepositorySettings,
-                                            Protocol expectedProtocol,
-                                            String expectedProxyHost,
-                                            int expectedProxyPort,
-                                            String expectedProxyUsername,
-                                            String expectedProxyPassword,
-                                            Integer expectedMaxRetries,
-                                            boolean expectedUseThrottleRetries,
-                                            int expectedReadTimeout) {
+                                           Protocol expectedProtocol,
+                                           String expectedProxyHost,
+                                           int expectedProxyPort,
+                                           String expectedProxyUsername,
+                                           String expectedProxyPassword,
+                                           Integer expectedMaxRetries,
+                                           boolean expectedUseThrottleRetries,
+                                           int expectedReadTimeout) {

         S3ClientSettings clientSettings = S3ClientSettings.getClientSettings(settings, "default");
-        ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings, singleRepositorySettings);
+        ClientConfiguration configuration = InternalAwsS3Service.buildConfiguration(clientSettings);

         assertThat(configuration.getResponseMetadataCacheSize(), is(0));
         assertThat(configuration.getProtocol(), is(expectedProtocol));
@@ -7,6 +7,8 @@ setup:
           mappings:
             doc:
               properties:
+                date:
+                  type: date
                 keyword:
                   type: keyword
                 long:

@@ -40,6 +42,20 @@ setup:
         id: 4
         body: { "keyword": "bar", "long": [1000, 0] }

+  - do:
+      index:
+        index: test
+        type: doc
+        id: 5
+        body: { "date": "2017-10-20T03:08:45" }
+
+  - do:
+      index:
+        index: test
+        type: doc
+        id: 6
+        body: { "date": "2017-10-21T07:00:00" }
+
   - do:
       indices.refresh:
         index: [test]

@@ -66,7 +82,7 @@ setup:
            }
          ]

-  - match: {hits.total: 4}
+  - match: {hits.total: 6}
  - length: { aggregations.test.buckets: 2 }
  - match: { aggregations.test.buckets.0.key.kw: "bar" }
  - match: { aggregations.test.buckets.0.doc_count: 3 }

@@ -104,7 +120,7 @@ setup:
            }
          ]

-  - match: {hits.total: 4}
+  - match: {hits.total: 6}
  - length: { aggregations.test.buckets: 5 }
  - match: { aggregations.test.buckets.0.key.long: 0}
  - match: { aggregations.test.buckets.0.key.kw: "bar" }

@@ -154,7 +170,7 @@ setup:
          ]
          after: { "long": 20, "kw": "foo" }

-  - match: {hits.total: 4}
+  - match: {hits.total: 6}
  - length: { aggregations.test.buckets: 2 }
  - match: { aggregations.test.buckets.0.key.long: 100 }
  - match: { aggregations.test.buckets.0.key.kw: "bar" }

@@ -188,7 +204,7 @@ setup:
          ]
          after: { "kw": "delta" }

-  - match: {hits.total: 4}
+  - match: {hits.total: 6}
  - length: { aggregations.test.buckets: 1 }
  - match: { aggregations.test.buckets.0.key.kw: "foo" }
  - match: { aggregations.test.buckets.0.doc_count: 2 }

@@ -220,3 +236,62 @@ setup:
            }
          }
        ]
+
+---
+"Composite aggregation with format":
+  - skip:
+      version: " - 6.99.99"
+      reason: this uses a new option (format) added in 7.0.0
+
+  - do:
+      search:
+        index: test
+        body:
+          aggregations:
+            test:
+              composite:
+                sources: [
+                  {
+                    "date": {
+                      "date_histogram": {
+                        "field": "date",
+                        "interval": "1d",
+                        "format": "yyyy-MM-dd"
+                      }
+                    }
+                  }
+                ]
+
+  - match: {hits.total: 6}
+  - length: { aggregations.test.buckets: 2 }
+  - match: { aggregations.test.buckets.0.key.date: "2017-10-20" }
+  - match: { aggregations.test.buckets.0.doc_count: 1 }
+  - match: { aggregations.test.buckets.1.key.date: "2017-10-21" }
+  - match: { aggregations.test.buckets.1.doc_count: 1 }
+
+  - do:
+      search:
+        index: test
+        body:
+          aggregations:
+            test:
+              composite:
+                after: {
+                  date: "2017-10-20"
+                }
+                sources: [
+                  {
+                    "date": {
+                      "date_histogram": {
+                        "field": "date",
+                        "interval": "1d",
+                        "format": "yyyy-MM-dd"
+                      }
+                    }
+                  }
+                ]
+
+  - match: {hits.total: 6}
+  - length: { aggregations.test.buckets: 1 }
+  - match: { aggregations.test.buckets.0.key.date: "2017-10-21" }
+  - match: { aggregations.test.buckets.0.doc_count: 1 }
@@ -32,6 +32,7 @@ import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentFactory;
 import org.elasticsearch.common.xcontent.XContentHelper;

@@ -57,7 +58,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
  * @see org.elasticsearch.client.IndicesAdminClient#putMapping(PutMappingRequest)
  * @see PutMappingResponse
  */
-public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> implements IndicesRequest.Replaceable {
+public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> implements IndicesRequest.Replaceable, ToXContentObject {

     private static ObjectHashSet<String> RESERVED_FIELDS = ObjectHashSet.from(
             "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_parent", "_routing", "_index",

@@ -318,4 +319,14 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
         }
         out.writeOptionalWriteable(concreteIndex);
     }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        if (source != null) {
+            builder.rawValue(new BytesArray(source), XContentType.JSON);
+        } else {
+            builder.startObject().endObject();
+        }
+        return builder;
+    }
 }
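
With the request now implementing ToXContentObject, its mapping body can be serialized for the REST layer. A short usage sketch (the index name, type, and mapping body are illustrative; source(String, XContentType) is the request's existing setter):

    import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
    import org.elasticsearch.common.xcontent.XContentType;

    PutMappingRequest request = new PutMappingRequest("twitter"); // hypothetical index
    request.type("doc");
    request.source(
        "{\"properties\":{\"message\":{\"type\":\"text\"}}}", // emitted raw by toXContent above
        XContentType.JSON);
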
@@ -22,13 +22,24 @@ package org.elasticsearch.action.admin.indices.mapping.put;
 import org.elasticsearch.action.support.master.AcknowledgedResponse;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;

 import java.io.IOException;

 /**
  * The response of put mapping operation.
  */
-public class PutMappingResponse extends AcknowledgedResponse {
+public class PutMappingResponse extends AcknowledgedResponse implements ToXContentObject {
+
+    private static final ConstructingObjectParser<PutMappingResponse, Void> PARSER = new ConstructingObjectParser<>("put_mapping",
+        true, args -> new PutMappingResponse((boolean) args[0]));
+
+    static {
+        declareAcknowledgedField(PARSER);
+    }

     protected PutMappingResponse() {

@@ -49,4 +60,16 @@ public class PutMappingResponse extends AcknowledgedResponse {
         super.writeTo(out);
         writeAcknowledged(out);
     }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        addAcknowledgedField(builder);
+        builder.endObject();
+        return builder;
+    }
+
+    public static PutMappingResponse fromXContent(XContentParser parser) throws IOException {
+        return PARSER.apply(parser, null);
+    }
 }
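
A minimal round-trip sketch for the parser registered above, in the style of the project's unit tests; it assumes test-class access to the boolean constructor and the ESTestCase createParser helper:

    PutMappingResponse original = new PutMappingResponse(true);
    XContentBuilder builder = XContentFactory.jsonBuilder();
    original.toXContent(builder, ToXContent.EMPTY_PARAMS);   // {"acknowledged":true}
    try (XContentParser parser = createParser(builder)) {    // ESTestCase helper
        PutMappingResponse parsed = PutMappingResponse.fromXContent(parser);
        assertTrue(parsed.isAcknowledged());
    }
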
@@ -122,14 +122,14 @@ abstract class AbstractSearchAsyncAction<Result extends SearchPhaseResult> exten
              * at least one successful operation left and if so we move to the next phase. If not we immediately fail the
              * search phase as "all shards failed"*/
             if (successfulOps.get() == 0) { // we have 0 successful results that means we shortcut stuff and return a failure
+                final ShardOperationFailedException[] shardSearchFailures = ExceptionsHelper.groupBy(buildShardFailures());
+                Throwable cause = shardSearchFailures.length == 0 ? null :
+                    ElasticsearchException.guessRootCauses(shardSearchFailures[0].getCause())[0];
                 if (logger.isDebugEnabled()) {
-                    final ShardOperationFailedException[] shardSearchFailures = ExceptionsHelper.groupBy(buildShardFailures());
-                    Throwable cause = shardSearchFailures.length == 0 ? null :
-                        ElasticsearchException.guessRootCauses(shardSearchFailures[0].getCause())[0];
                     logger.debug((Supplier<?>) () -> new ParameterizedMessage("All shards failed for phase: [{}]", getName()),
                         cause);
                 }
-                onPhaseFailure(currentPhase, "all shards failed", null);
+                onPhaseFailure(currentPhase, "all shards failed", cause);
             } else {
                 if (logger.isTraceEnabled()) {
                     final String resultsFrom = results.getSuccessfulResults()
@@ -276,11 +276,8 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
             if (!filteredValues.isEmpty()) {
                 // Make the list order deterministic
                 CollectionUtil.timSort(filteredValues, Comparator.comparing(AliasMetaData::alias));
-                mapBuilder.put(index, Collections.unmodifiableList(filteredValues));
-            } else if (matchAllAliases) {
-                // in case all aliases are requested then it is desired to return the concrete index with no aliases (#25114):
-                mapBuilder.put(index, Collections.emptyList());
             }
+            mapBuilder.put(index, Collections.unmodifiableList(filteredValues));
         }
         return mapBuilder.build();
     }
@@ -597,7 +597,7 @@ public class Setting<T> implements ToXContentObject {

         @Override
         public boolean hasChanged(Settings current, Settings previous) {
-            return Stream.concat(matchStream(current), matchStream(previous)).findAny().isPresent();
+            return current.filter(k -> match(k)).equals(previous.filter(k -> match(k))) == false;
         }

         @Override

@@ -612,7 +612,7 @@ public class Setting<T> implements ToXContentObject {
             if (updater.hasChanged(current, previous)) {
                 // only the ones that have changed otherwise we might get too many updates
                 // the hasChanged above checks only if there are any changes
-                    T value = updater.getValue(current, previous);
+                T value = updater.getValue(current, previous);
                 if ((omitDefaults && value.equals(concreteSetting.getDefault(current))) == false) {
                     result.put(namespace, value);
                 }
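
The behavioral point of the hasChanged rewrite, modeled with plain Java maps as illustrative stand-ins for Settings: the old stream check answers "does any key under the prefix exist in either snapshot", which fires even when nothing changed, while the new check compares the filtered views and fires only on a real add, remove, or modification.

    import java.util.HashMap;
    import java.util.Map;
    import java.util.function.Predicate;
    import java.util.stream.Collectors;

    public class AffixChangeSketch {
        // old semantics: any matching key present => "changed"
        static boolean presenceCheck(Map<String, String> current, Map<String, String> previous, Predicate<String> match) {
            return current.keySet().stream().anyMatch(match) || previous.keySet().stream().anyMatch(match);
        }

        // new semantics: the filtered views must actually differ
        static boolean equalityCheck(Map<String, String> current, Map<String, String> previous, Predicate<String> match) {
            return filter(current, match).equals(filter(previous, match)) == false;
        }

        static Map<String, String> filter(Map<String, String> m, Predicate<String> match) {
            return m.entrySet().stream().filter(e -> match.test(e.getKey()))
                .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
        }

        public static void main(String[] args) {
            Predicate<String> match = k -> k.startsWith("foo.");
            Map<String, String> previous = new HashMap<>();
            previous.put("foo.bar", "1");
            Map<String, String> current = new HashMap<>(previous); // unchanged under the prefix
            System.out.println(presenceCheck(current, previous, match)); // true  (spurious update)
            System.out.println(equalityCheck(current, previous, match)); // false (no update)
        }
    }
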
@@ -19,16 +19,20 @@

 package org.elasticsearch.common.util;

+import java.nio.file.Path;
 import java.util.AbstractList;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
+import java.util.IdentityHashMap;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.RandomAccess;
+import java.util.Set;

 import com.carrotsearch.hppc.ObjectArrayList;
 import org.apache.lucene.util.BytesRef;

@@ -221,6 +225,40 @@ public class CollectionUtils {
         return ints.stream().mapToInt(s -> s).toArray();
     }

+    public static void ensureNoSelfReferences(Object value) {
+        Iterable<?> it = convert(value);
+        if (it != null) {
+            ensureNoSelfReferences(it, value, Collections.newSetFromMap(new IdentityHashMap<>()));
+        }
+    }
+
+    private static Iterable<?> convert(Object value) {
+        if (value == null) {
+            return null;
+        }
+        if (value instanceof Map) {
+            return ((Map<?,?>) value).values();
+        } else if ((value instanceof Iterable) && (value instanceof Path == false)) {
+            return (Iterable<?>) value;
+        } else if (value instanceof Object[]) {
+            return Arrays.asList((Object[]) value);
+        } else {
+            return null;
+        }
+    }
+
+    private static void ensureNoSelfReferences(final Iterable<?> value, Object originalReference, final Set<Object> ancestors) {
+        if (value != null) {
+            if (ancestors.add(originalReference) == false) {
+                throw new IllegalArgumentException("Iterable object is self-referencing itself");
+            }
+            for (Object o : value) {
+                ensureNoSelfReferences(convert(o), o, ancestors);
+            }
+            ancestors.remove(originalReference);
+        }
+    }
+
     private static class RotatedList<T> extends AbstractList<T> implements RandomAccess {

         private final List<T> in;
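
The new helper walks values recursively, tracking every container it has entered in an identity-based set; re-entering one means a cycle. A standalone sketch of the same algorithm, simplified to maps only and reproducing the two mutually-referencing maps from the YAML test:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.IdentityHashMap;
    import java.util.Map;
    import java.util.Set;

    public class SelfReferenceSketch {
        public static void main(String[] args) {
            Map<String, Object> x = new HashMap<>();
            Map<String, Object> y = new HashMap<>();
            x.put("a", y);
            y.put("a", x); // x -> y -> x: a cycle
            ensureNoSelfReferences(x.values(), x, Collections.newSetFromMap(new IdentityHashMap<>()));
            // throws IllegalArgumentException: Iterable object is self-referencing itself
        }

        private static void ensureNoSelfReferences(Iterable<?> values, Object origin, Set<Object> ancestors) {
            // tracked by identity, not equals(): two equal-but-distinct maps are fine
            if (ancestors.add(origin) == false) {
                throw new IllegalArgumentException("Iterable object is self-referencing itself");
            }
            for (Object o : values) {
                if (o instanceof Map) {
                    ensureNoSelfReferences(((Map<?, ?>) o).values(), o, ancestors);
                }
            }
            ancestors.remove(origin); // siblings may legitimately share a sub-object
        }
    }
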
@@ -28,6 +28,7 @@ import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.unit.ByteSizeValue;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.joda.time.DateTimeZone;
 import org.joda.time.ReadableInstant;
 import org.joda.time.format.DateTimeFormatter;

@@ -43,7 +44,6 @@ import java.util.Calendar;
 import java.util.Collections;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.IdentityHashMap;
 import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;

@@ -780,7 +780,6 @@ public final class XContentBuilder implements Releasable, Flushable {
         if (values == null) {
             return nullValue();
         }
-
         return value(Arrays.asList(values), ensureNoSelfReferences);
     }

@@ -865,7 +864,7 @@ public final class XContentBuilder implements Releasable, Flushable {
         // checks that the map does not contain references to itself because
         // iterating over map entries will cause a stackoverflow error
         if (ensureNoSelfReferences) {
-            ensureNoSelfReferences(values);
+            CollectionUtils.ensureNoSelfReferences(values);
         }

         startObject();

@@ -894,9 +893,8 @@ public final class XContentBuilder implements Releasable, Flushable {
         // checks that the iterable does not contain references to itself because
         // iterating over entries will cause a stackoverflow error
         if (ensureNoSelfReferences) {
-            ensureNoSelfReferences(values);
+            CollectionUtils.ensureNoSelfReferences(values);
         }
-
         startArray();
         for (Object value : values) {
             // pass ensureNoSelfReferences=false as we already performed the check at a higher level

@@ -1067,32 +1065,4 @@ public final class XContentBuilder implements Releasable, Flushable {
             throw new IllegalArgumentException(message);
         }
     }
-
-    static void ensureNoSelfReferences(Object value) {
-        ensureNoSelfReferences(value, Collections.newSetFromMap(new IdentityHashMap<>()));
-    }
-
-    private static void ensureNoSelfReferences(final Object value, final Set<Object> ancestors) {
-        if (value != null) {
-
-            Iterable<?> it;
-            if (value instanceof Map) {
-                it = ((Map<?,?>) value).values();
-            } else if ((value instanceof Iterable) && (value instanceof Path == false)) {
-                it = (Iterable<?>) value;
-            } else if (value instanceof Object[]) {
-                it = Arrays.asList((Object[]) value);
-            } else {
-                return;
-            }
-
-            if (ancestors.add(value) == false) {
-                throw new IllegalArgumentException("Object has already been built and is self-referencing itself");
-            }
-            for (Object o : it) {
-                ensureNoSelfReferences(o, ancestors);
-            }
-            ancestors.remove(value);
-        }
-    }
 }
@@ -147,17 +147,15 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder<Comp
             Sort sort = indexSortConfig.buildIndexSort(shardContext::fieldMapper, shardContext::getForField);
             System.arraycopy(sort.getSort(), 0, sortFields, 0, sortFields.length);
         }
-        List<String> sourceNames = new ArrayList<>();
         for (int i = 0; i < configs.length; i++) {
             configs[i] = sources.get(i).build(context, i, configs.length, sortFields[i]);
-            sourceNames.add(sources.get(i).name());
             if (configs[i].valuesSource().needsScores()) {
                 throw new IllegalArgumentException("[sources] cannot access _score");
             }
         }
         final CompositeKey afterKey;
         if (after != null) {
-            if (after.size() != sources.size()) {
+            if (after.size() != configs.length) {
                 throw new IllegalArgumentException("[after] has " + after.size() +
                     " value(s) but [sources] has " + sources.size());
             }

@@ -179,7 +177,7 @@ public class CompositeAggregationBuilder extends AbstractAggregationBuilder<Comp
         } else {
             afterKey = null;
         }
-        return new CompositeAggregationFactory(name, context, parent, subfactoriesBuilder, metaData, size, configs, sourceNames, afterKey);
+        return new CompositeAggregationFactory(name, context, parent, subfactoriesBuilder, metaData, size, configs, afterKey);
     }

@@ -32,17 +32,14 @@ import java.util.Map;
 class CompositeAggregationFactory extends AggregatorFactory<CompositeAggregationFactory> {
     private final int size;
     private final CompositeValuesSourceConfig[] sources;
-    private final List<String> sourceNames;
     private final CompositeKey afterKey;

     CompositeAggregationFactory(String name, SearchContext context, AggregatorFactory<?> parent,
                                 AggregatorFactories.Builder subFactoriesBuilder, Map<String, Object> metaData,
-                                int size, CompositeValuesSourceConfig[] sources,
-                                List<String> sourceNames, CompositeKey afterKey) throws IOException {
+                                int size, CompositeValuesSourceConfig[] sources, CompositeKey afterKey) throws IOException {
         super(name, context, parent, subFactoriesBuilder, metaData);
         this.size = size;
         this.sources = sources;
-        this.sourceNames = sourceNames;
         this.afterKey = afterKey;
     }

@@ -50,6 +47,6 @@ class CompositeAggregationFactory extends AggregatorFactory<CompositeAggregation
     protected Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket,
                                         List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
         return new CompositeAggregator(name, factories, context, parent, pipelineAggregators, metaData,
-            size, sources, sourceNames, afterKey);
+            size, sources, afterKey);
     }
 }
@@ -27,6 +27,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.util.RoaringDocIdSet;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregator;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.InternalAggregation;

@@ -43,11 +44,13 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
+import java.util.stream.Collectors;

 final class CompositeAggregator extends BucketsAggregator {
     private final int size;
     private final CompositeValuesSourceConfig[] sources;
     private final List<String> sourceNames;
+    private final List<DocValueFormat> formats;
     private final boolean canEarlyTerminate;

     private final TreeMap<Integer, Integer> keys;

@@ -59,12 +62,12 @@ final class CompositeAggregator extends BucketsAggregator {

     CompositeAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent,
                         List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData,
-                        int size, CompositeValuesSourceConfig[] sources, List<String> sourceNames,
-                        CompositeKey rawAfterKey) throws IOException {
+                        int size, CompositeValuesSourceConfig[] sources, CompositeKey rawAfterKey) throws IOException {
         super(name, factories, context, parent, pipelineAggregators, metaData);
         this.size = size;
         this.sources = sources;
-        this.sourceNames = sourceNames;
+        this.sourceNames = Arrays.stream(sources).map(CompositeValuesSourceConfig::name).collect(Collectors.toList());
+        this.formats = Arrays.stream(sources).map(CompositeValuesSourceConfig::format).collect(Collectors.toList());
         // we use slot 0 to fill the current document (size+1).
         this.array = new CompositeValuesComparator(context.searcher().getIndexReader(), sources, size+1);
         if (rawAfterKey != null) {

@@ -131,15 +134,17 @@ final class CompositeAggregator extends BucketsAggregator {
             CompositeKey key = array.toCompositeKey(slot);
             InternalAggregations aggs = bucketAggregations(slot);
             int docCount = bucketDocCount(slot);
-            buckets[pos++] = new InternalComposite.InternalBucket(sourceNames, key, reverseMuls, docCount, aggs);
+            buckets[pos++] = new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs);
         }
-        return new InternalComposite(name, size, sourceNames, Arrays.asList(buckets), reverseMuls, pipelineAggregators(), metaData());
+        return new InternalComposite(name, size, sourceNames, formats, Arrays.asList(buckets), reverseMuls,
+            pipelineAggregators(), metaData());
     }

     @Override
     public InternalAggregation buildEmptyAggregation() {
         final int[] reverseMuls = getReverseMuls();
-        return new InternalComposite(name, size, sourceNames, Collections.emptyList(), reverseMuls, pipelineAggregators(), metaData());
+        return new InternalComposite(name, size, sourceNames, formats, Collections.emptyList(), reverseMuls,
+            pipelineAggregators(), metaData());
     }

     @Override
@@ -56,7 +56,7 @@ final class CompositeValuesComparator {
             if (vs.isFloatingPoint()) {
                 arrays[i] = CompositeValuesSource.wrapDouble(vs, size, reverseMul);
             } else {
-                arrays[i] = CompositeValuesSource.wrapLong(vs, size, reverseMul);
+                arrays[i] = CompositeValuesSource.wrapLong(vs, sources[i].format(), size, reverseMul);
             }
         }
     }
@@ -23,8 +23,10 @@ import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.search.LeafCollector;
 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.joda.FormatDateTimeFormatter;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.sort.SortOrder;

@@ -96,8 +98,9 @@ abstract class CompositeValuesSource<VS extends ValuesSource, T extends Comparab
     /**
      * Creates a {@link CompositeValuesSource} that generates long values.
      */
-    static CompositeValuesSource<ValuesSource.Numeric, Long> wrapLong(ValuesSource.Numeric vs, int size, int reverseMul) {
-        return new LongValuesSource(vs, size, reverseMul);
+    static CompositeValuesSource<ValuesSource.Numeric, Long> wrapLong(ValuesSource.Numeric vs, DocValueFormat format,
+                                                                      int size, int reverseMul) {
+        return new LongValuesSource(vs, format, size, reverseMul);
     }

     /**

@@ -273,9 +276,12 @@ abstract class CompositeValuesSource<VS extends ValuesSource, T extends Comparab
     */
     private static class LongValuesSource extends CompositeValuesSource<ValuesSource.Numeric, Long> {
         private final long[] values;
+        // handles "format" for date histogram source
+        private final DocValueFormat format;

-        LongValuesSource(ValuesSource.Numeric vs, int size, int reverseMul) {
+        LongValuesSource(ValuesSource.Numeric vs, DocValueFormat format, int size, int reverseMul) {
             super(vs, size, reverseMul);
+            this.format = format;
             this.values = new long[size];
         }

@@ -304,7 +310,11 @@ abstract class CompositeValuesSource<VS extends ValuesSource, T extends Comparab
                 if (value instanceof Number) {
                     topValue = ((Number) value).longValue();
                 } else {
-                    topValue = Long.parseLong(value.toString());
+                    // for date histogram source with "format", the after value is formatted
+                    // as a string so we need to retrieve the original value in milliseconds.
+                    topValue = format.parseLong(value.toString(), false, () -> {
+                        throw new IllegalArgumentException("now() is not supported in [after] key");
+                    });
                 }
             }
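
Concretely, with a format of "yyyy-MM-dd" the after key arrives as a string like "2017-10-21", and parseLong must turn it back into the epoch-millis long that the comparator sorts on. The same conversion, modeled with java.time (Elasticsearch itself goes through DocValueFormat and its joda formatter):

    import java.time.LocalDate;
    import java.time.ZoneOffset;
    import java.time.format.DateTimeFormatter;

    DateTimeFormatter fmt = DateTimeFormatter.ofPattern("yyyy-MM-dd");
    long afterMillis = LocalDate.parse("2017-10-21", fmt)
        .atStartOfDay(ZoneOffset.UTC).toInstant().toEpochMilli();
    // afterMillis == 1508544000000L, the raw long the values source compares
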
@@ -25,6 +25,7 @@ import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.SortedNumericDocValues;
 import org.apache.lucene.index.SortedSetDocValues;
 import org.apache.lucene.search.SortField;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;

@@ -51,6 +52,7 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
     private ValueType valueType = null;
     private Object missing = null;
     private SortOrder order = SortOrder.ASC;
+    private String format = null;

     CompositeValuesSourceBuilder(String name) {
         this(name, null);

@@ -72,6 +74,11 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
         }
         this.missing = in.readGenericValue();
         this.order = SortOrder.readFromStream(in);
+        if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+            this.format = in.readOptionalString();
+        } else {
+            this.format = null;
+        }
     }

     @Override

@@ -90,6 +97,9 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
         }
         out.writeGenericValue(missing);
         order.writeTo(out);
+        if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+            out.writeOptionalString(format);
+        }
         innerWriteTo(out);
     }

@@ -112,6 +122,9 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
         if (valueType != null) {
             builder.field("value_type", valueType.getPreferredName());
         }
+        if (format != null) {
+            builder.field("format", format);
+        }
         builder.field("order", order);
         doXContentBody(builder, params);
         builder.endObject();

@@ -120,7 +133,7 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou

     @Override
     public final int hashCode() {
-        return Objects.hash(field, missing, script, valueType, order, innerHashCode());
+        return Objects.hash(field, missing, script, valueType, order, format, innerHashCode());
     }

     protected abstract int innerHashCode();

@@ -137,6 +150,7 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
             Objects.equals(valueType, that.valueType()) &&
             Objects.equals(missing, that.missing()) &&
             Objects.equals(order, that.order()) &&
+            Objects.equals(format, that.format()) &&
             innerEquals(that);
     }

@@ -254,6 +268,24 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou
         return order;
     }

+    /**
+     * Sets the format to use for the output of the aggregation.
+     */
+    public AB format(String format) {
+        if (format == null) {
+            throw new IllegalArgumentException("[format] must not be null: [" + name + "]");
+        }
+        this.format = format;
+        return (AB) this;
+    }
+
+    /**
+     * Gets the format to use for the output of the aggregation.
+     */
+    public String format() {
+        return format;
+    }
+
     /**
      * Creates a {@link CompositeValuesSourceConfig} for this source.
      *

@@ -271,7 +303,7 @@ public abstract class CompositeValuesSourceBuilder<AB extends CompositeValuesSou

     public final CompositeValuesSourceConfig build(SearchContext context, int pos, int numPos, SortField sortField) throws IOException {
         ValuesSourceConfig<?> config = ValuesSourceConfig.resolve(context.getQueryShardContext(),
-            valueType, field, script, missing, null, null);
+            valueType, field, script, missing, null, format);
         return innerBuild(context, config, pos, numPos, sortField);
     }

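
The Java-API counterpart of the YAML test earlier in this diff, as a sketch; the builder method names are taken from this diff and the sibling date_histogram aggregation builder:

    import java.util.Arrays;
    import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
    import org.elasticsearch.search.aggregations.bucket.composite.DateHistogramValuesSourceBuilder;
    import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;

    DateHistogramValuesSourceBuilder date = new DateHistogramValuesSourceBuilder("date")
        .field("date")
        .dateHistogramInterval(DateHistogramInterval.days(1))
        .format("yyyy-MM-dd"); // the new setter; keys render as "2017-10-20" instead of epoch millis
    CompositeAggregationBuilder composite =
        new CompositeAggregationBuilder("test", Arrays.asList(date));
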
@@ -19,30 +19,47 @@

 package org.elasticsearch.search.aggregations.bucket.composite;

+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.support.ValuesSource;
 import org.elasticsearch.search.sort.SortOrder;

 class CompositeValuesSourceConfig {
     private final String name;
     private final ValuesSource vs;
+    private final DocValueFormat format;
     private final int reverseMul;
     private final boolean canEarlyTerminate;

-    CompositeValuesSourceConfig(String name, ValuesSource vs, SortOrder order, boolean canEarlyTerminate) {
+    CompositeValuesSourceConfig(String name, ValuesSource vs, DocValueFormat format, SortOrder order, boolean canEarlyTerminate) {
         this.name = name;
         this.vs = vs;
+        this.format = format;
         this.canEarlyTerminate = canEarlyTerminate;
         this.reverseMul = order == SortOrder.ASC ? 1 : -1;
     }

     /**
      * Returns the name associated with this configuration.
      */
     String name() {
         return name;
     }

     /**
      * Returns the {@link ValuesSource} for this configuration.
      */
     ValuesSource valuesSource() {
         return vs;
     }

+    /**
+     * The {@link DocValueFormat} to use for formatting the keys.
+     * {@link DocValueFormat#RAW} means no formatting.
+     */
+    DocValueFormat format() {
+        return format;
+    }
+
     /**
      * The sort order for the values source (e.g. -1 for descending and 1 for ascending).
      */

@@ -51,6 +68,9 @@ class CompositeValuesSourceConfig {
         return reverseMul;
     }

+    /**
+     * Returns whether this {@link ValuesSource} is used to sort the index.
+     */
     boolean canEarlyTerminate() {
         return canEarlyTerminate;
     }
@@ -30,6 +30,8 @@ import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.script.Script;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
 import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
+import org.elasticsearch.search.aggregations.support.FieldContext;
@@ -46,8 +48,8 @@ import java.util.Objects;
 import static org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder.DATE_FIELD_UNITS;

 /**
- * A {@link CompositeValuesSourceBuilder} that that builds a {@link RoundingValuesSource} from a {@link Script} or
- * a field name.
+ * A {@link CompositeValuesSourceBuilder} that builds a {@link RoundingValuesSource} from a {@link Script} or
+ * a field name using the provided interval.
  */
 public class DateHistogramValuesSourceBuilder extends CompositeValuesSourceBuilder<DateHistogramValuesSourceBuilder> {
     static final String TYPE = "date_histogram";
@@ -55,6 +57,7 @@ public class DateHistogramValuesSourceBuild
     private static final ObjectParser<DateHistogramValuesSourceBuilder, Void> PARSER;
     static {
         PARSER = new ObjectParser<>(DateHistogramValuesSourceBuilder.TYPE);
+        PARSER.declareString(DateHistogramValuesSourceBuilder::format, new ParseField("format"));
         PARSER.declareField((histogram, interval) -> {
             if (interval instanceof Long) {
                 histogram.interval((long) interval);
@@ -235,7 +238,11 @@ public class DateHistogramValuesSourceBuild
             canEarlyTerminate = checkCanEarlyTerminate(context.searcher().getIndexReader(),
                 fieldContext.field(), order() == SortOrder.ASC ? false : true, sortField);
         }
-        return new CompositeValuesSourceConfig(name, vs, order(), canEarlyTerminate);
+        // dates are returned as timestamp in milliseconds-since-the-epoch unless a specific date format
+        // is specified in the builder.
+        final DocValueFormat docValueFormat = format() == null ? DocValueFormat.RAW : config.format();
+        return new CompositeValuesSourceConfig(name, vs, docValueFormat,
+            order(), canEarlyTerminate);
     } else {
         throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName());
     }
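
The format option threads from the parser registration above into the CompositeValuesSourceConfig. For illustration, a minimal sketch of a composite source whose date keys come back formatted rather than as epoch millis, drawn from the tests later in this diff (the aggregation name "by_day" and the request-building context are assumed):

    // A date_histogram composite source with a key format; leaving format()
    // unset falls back to DocValueFormat.RAW, i.e. epoch milliseconds.
    DateHistogramValuesSourceBuilder date = new DateHistogramValuesSourceBuilder("date")
        .field("date")
        .dateHistogramInterval(DateHistogramInterval.days(1))
        .format("yyyy-MM-dd");
    CompositeAggregationBuilder composite =
        new CompositeAggregationBuilder("by_day", Collections.singletonList(date));
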
@@ -37,7 +37,7 @@ import java.io.IOException;
 import java.util.Objects;

 /**
- * A {@link CompositeValuesSourceBuilder} that that builds a {@link HistogramValuesSource} from another numeric values source
+ * A {@link CompositeValuesSourceBuilder} that builds a {@link HistogramValuesSource} from another numeric values source
  * using the provided interval.
  */
 public class HistogramValuesSourceBuilder extends CompositeValuesSourceBuilder<HistogramValuesSourceBuilder> {
@@ -128,7 +128,7 @@ public class HistogramValuesSourceBuilder extends CompositeValuesSourceBuilder<H
             canEarlyTerminate = checkCanEarlyTerminate(context.searcher().getIndexReader(),
                 fieldContext.field(), order() == SortOrder.ASC ? false : true, sortField);
         }
-        return new CompositeValuesSourceConfig(name, vs, order(), canEarlyTerminate);
+        return new CompositeValuesSourceConfig(name, vs, config.format(), order(), canEarlyTerminate);
     } else {
         throw new IllegalArgumentException("invalid source, expected numeric, got " + orig.getClass().getSimpleName());
     }
@@ -20,9 +20,11 @@
 package org.elasticsearch.search.aggregations.bucket.composite;

 import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.Aggregations;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -35,6 +37,7 @@ import java.util.AbstractMap;
 import java.util.AbstractSet;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -49,11 +52,14 @@ public class InternalComposite
     private final List<InternalBucket> buckets;
     private final int[] reverseMuls;
     private final List<String> sourceNames;
+    private final List<DocValueFormat> formats;

-    InternalComposite(String name, int size, List<String> sourceNames, List<InternalBucket> buckets, int[] reverseMuls,
+    InternalComposite(String name, int size, List<String> sourceNames, List<DocValueFormat> formats,
+                      List<InternalBucket> buckets, int[] reverseMuls,
                       List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
         super(name, pipelineAggregators, metaData);
         this.sourceNames = sourceNames;
+        this.formats = formats;
         this.buckets = buckets;
         this.size = size;
         this.reverseMuls = reverseMuls;
@@ -63,14 +69,27 @@ public class InternalComposite
         super(in);
         this.size = in.readVInt();
         this.sourceNames = in.readList(StreamInput::readString);
+        this.formats = new ArrayList<>(sourceNames.size());
+        for (int i = 0; i < sourceNames.size(); i++) {
+            if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+                formats.add(in.readNamedWriteable(DocValueFormat.class));
+            } else {
+                formats.add(DocValueFormat.RAW);
+            }
+        }
         this.reverseMuls = in.readIntArray();
-        this.buckets = in.readList((input) -> new InternalBucket(input, sourceNames, reverseMuls));
+        this.buckets = in.readList((input) -> new InternalBucket(input, sourceNames, formats, reverseMuls));
     }

     @Override
     protected void doWriteTo(StreamOutput out) throws IOException {
         out.writeVInt(size);
         out.writeStringList(sourceNames);
+        if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+            for (DocValueFormat format : formats) {
+                out.writeNamedWriteable(format);
+            }
+        }
         out.writeIntArray(reverseMuls);
         out.writeList(buckets);
     }
@@ -87,12 +106,13 @@ public class InternalComposite

     @Override
     public InternalComposite create(List<InternalBucket> buckets) {
-        return new InternalComposite(name, size, sourceNames, buckets, reverseMuls, pipelineAggregators(), getMetaData());
+        return new InternalComposite(name, size, sourceNames, formats, buckets, reverseMuls, pipelineAggregators(), getMetaData());
     }

     @Override
     public InternalBucket createBucket(InternalAggregations aggregations, InternalBucket prototype) {
-        return new InternalBucket(prototype.sourceNames, prototype.key, prototype.reverseMuls, prototype.docCount, aggregations);
+        return new InternalBucket(prototype.sourceNames, prototype.formats, prototype.key, prototype.reverseMuls,
+            prototype.docCount, aggregations);
     }

     public int getSize() {
@@ -149,7 +169,7 @@ public class InternalComposite
             reduceContext.consumeBucketsAndMaybeBreak(1);
             result.add(reduceBucket);
         }
-        return new InternalComposite(name, size, sourceNames, result, reverseMuls, pipelineAggregators(), metaData);
+        return new InternalComposite(name, size, sourceNames, formats, result, reverseMuls, pipelineAggregators(), metaData);
     }

     @Override
@@ -191,18 +211,21 @@ public class InternalComposite
         private final InternalAggregations aggregations;
         private final transient int[] reverseMuls;
         private final transient List<String> sourceNames;
+        private final transient List<DocValueFormat> formats;


-        InternalBucket(List<String> sourceNames, CompositeKey key, int[] reverseMuls, long docCount, InternalAggregations aggregations) {
+        InternalBucket(List<String> sourceNames, List<DocValueFormat> formats, CompositeKey key, int[] reverseMuls, long docCount,
+                       InternalAggregations aggregations) {
             this.key = key;
             this.docCount = docCount;
             this.aggregations = aggregations;
             this.reverseMuls = reverseMuls;
             this.sourceNames = sourceNames;
+            this.formats = formats;
         }

         @SuppressWarnings("unchecked")
-        InternalBucket(StreamInput in, List<String> sourceNames, int[] reverseMuls) throws IOException {
+        InternalBucket(StreamInput in, List<String> sourceNames, List<DocValueFormat> formats, int[] reverseMuls) throws IOException {
             final Comparable<?>[] values = new Comparable<?>[in.readVInt()];
             for (int i = 0; i < values.length; i++) {
                 values[i] = (Comparable<?>) in.readGenericValue();
@@ -212,6 +235,7 @@ public class InternalComposite
             this.aggregations = InternalAggregations.readAggregations(in);
             this.reverseMuls = reverseMuls;
             this.sourceNames = sourceNames;
+            this.formats = formats;
         }

         @Override
@@ -242,9 +266,11 @@ public class InternalComposite

         @Override
         public Map<String, Object> getKey() {
-            return new ArrayMap(sourceNames, key.values());
+            // returns the formatted key in a map
+            return new ArrayMap(sourceNames, formats, key.values());
         }

+        // get the raw key (without formatting to preserve the natural order).
         // visible for testing
         CompositeKey getRawKey() {
             return key;
@@ -260,7 +286,7 @@ public class InternalComposite
             }
             builder.append(sourceNames.get(i));
             builder.append('=');
-            builder.append(formatObject(key.get(i)));
+            builder.append(formatObject(key.get(i), formats.get(i)));
             }
             builder.append('}');
             return builder.toString();
@@ -284,7 +310,7 @@ public class InternalComposite
                 aggregations.add(bucket.aggregations);
             }
             InternalAggregations aggs = InternalAggregations.reduce(aggregations, reduceContext);
-            return new InternalBucket(sourceNames, key, reverseMuls, docCount, aggs);
+            return new InternalBucket(sourceNames, formats, key, reverseMuls, docCount, aggs);
         }

         @Override
@@ -303,26 +329,52 @@ public class InternalComposite
         @Override
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             /**
-             * See {@link CompositeAggregation#bucketToXContentFragment}
+             * See {@link CompositeAggregation#bucketToXContent}
             */
             throw new UnsupportedOperationException("not implemented");
         }
     }

-    static Object formatObject(Object obj) {
-        if (obj instanceof BytesRef) {
-            return ((BytesRef) obj).utf8ToString();
+    /**
+     * Format <code>obj</code> using the provided {@link DocValueFormat}.
+     * If the format is equals to {@link DocValueFormat#RAW}, the object is returned as is
+     * for numbers and a string for {@link BytesRef}s.
+     */
+    static Object formatObject(Object obj, DocValueFormat format) {
+        if (obj.getClass() == BytesRef.class) {
+            BytesRef value = (BytesRef) obj;
+            if (format == DocValueFormat.RAW) {
+                return value.utf8ToString();
+            } else {
+                return format.format((BytesRef) obj);
+            }
+        } else if (obj.getClass() == Long.class) {
+            Long value = (Long) obj;
+            if (format == DocValueFormat.RAW) {
+                return value;
+            } else {
+                return format.format(value);
+            }
+        } else if (obj.getClass() == Double.class) {
+            Double value = (Double) obj;
+            if (format == DocValueFormat.RAW) {
+                return value;
+            } else {
+                return format.format((Double) obj);
+            }
+        }
         return obj;
     }

     private static class ArrayMap extends AbstractMap<String, Object> {
         final List<String> keys;
+        final List<DocValueFormat> formats;
         final Object[] values;

-        ArrayMap(List<String> keys, Object[] values) {
-            assert keys.size() == values.length;
+        ArrayMap(List<String> keys, List<DocValueFormat> formats, Object[] values) {
+            assert keys.size() == values.length && keys.size() == formats.size();
             this.keys = keys;
+            this.formats = formats;
             this.values = values;
         }

@@ -335,7 +387,7 @@ public class InternalComposite
         public Object get(Object key) {
             for (int i = 0; i < keys.size(); i++) {
                 if (key.equals(keys.get(i))) {
-                    return formatObject(values[i]);
+                    return formatObject(values[i], formats.get(i));
                 }
             }
             return null;
@@ -356,7 +408,7 @@ public class InternalComposite
             @Override
             public Entry<String, Object> next() {
                 SimpleEntry<String, Object> entry =
-                        new SimpleEntry<>(keys.get(pos), formatObject(values[pos]));
+                        new SimpleEntry<>(keys.get(pos), formatObject(values[pos], formats.get(pos)));
                 ++ pos;
                 return entry;
             }
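
Note that getKey() hands back an ArrayMap, so key values are formatted lazily, only when a caller actually reads them. A standalone sketch of that idea, with DocValueFormat stood in for by a plain Function (class and method names here are illustrative, not the actual InternalComposite internals):

    import java.util.AbstractMap;
    import java.util.AbstractSet;
    import java.util.Iterator;
    import java.util.List;
    import java.util.Set;
    import java.util.function.Function;

    // A map view over parallel key/value arrays that applies a per-slot
    // formatter on read, mirroring the intent of ArrayMap above.
    class FormattedArrayMap extends AbstractMap<String, Object> {
        private final List<String> keys;
        private final List<Function<Object, Object>> formats;
        private final Object[] values;

        FormattedArrayMap(List<String> keys, List<Function<Object, Object>> formats, Object[] values) {
            assert keys.size() == values.length && keys.size() == formats.size();
            this.keys = keys;
            this.formats = formats;
            this.values = values;
        }

        @Override
        public Set<Entry<String, Object>> entrySet() {
            return new AbstractSet<Entry<String, Object>>() {
                @Override
                public Iterator<Entry<String, Object>> iterator() {
                    return new Iterator<Entry<String, Object>>() {
                        private int pos = 0;

                        @Override
                        public boolean hasNext() {
                            return pos < values.length;
                        }

                        @Override
                        public Entry<String, Object> next() {
                            // formatting happens here, on access, not at construction time
                            Entry<String, Object> entry =
                                new SimpleEntry<>(keys.get(pos), formats.get(pos).apply(values[pos]));
                            pos++;
                            return entry;
                        }
                    };
                }

                @Override
                public int size() {
                    return values.length;
                }
            };
        }
    }
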
@@ -95,6 +95,6 @@ public class TermsValuesSourceBuilder extends CompositeValuesSourceBuilder<Terms
             canEarlyTerminate = checkCanEarlyTerminate(context.searcher().getIndexReader(),
                 fieldContext.field(), order() == SortOrder.ASC ? false : true, sortField);
         }
-        return new CompositeValuesSourceConfig(name, vs, order(), canEarlyTerminate);
+        return new CompositeValuesSourceConfig(name, vs, config.format(), order(), canEarlyTerminate);
     }
 }
@@ -20,6 +20,7 @@
 package org.elasticsearch.search.aggregations.metrics.scripted;

 import org.apache.lucene.index.LeafReaderContext;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.SearchScript;
@@ -77,6 +78,7 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
         Object aggregation;
         if (combineScript != null) {
             aggregation = combineScript.run();
+            CollectionUtils.ensureNoSelfReferences(aggregation);
         } else {
             aggregation = params.get("_agg");
         }
@@ -112,10 +112,11 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
             } else {
                 ExecutableScript executableScript = factory.newInstance(vars);
                 Object returned = executableScript.run();
+                // no need to check for self references since only numbers are valid
                 if (returned == null) {
                     newBuckets.add(bucket);
                 } else {
-                    if (!(returned instanceof Number)) {
+                    if ((returned instanceof Number) == false) {
                         throw new AggregationExecutionException("series_arithmetic script for reducer [" + name()
                             + "] must return a Number");
                     }
@@ -30,6 +30,7 @@ import org.apache.lucene.search.Scorer;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.lucene.ScorerAware;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues;
 import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
 import org.elasticsearch.index.fielddata.IndexFieldData;
@@ -460,7 +461,9 @@ public abstract class ValuesSource {
                 for (int i = 0; i < count; ++i) {
                     final BytesRef value = bytesValues.nextValue();
                     script.setNextAggregationValue(value.utf8ToString());
-                    values[i].copyChars(script.run().toString());
+                    Object run = script.run();
+                    CollectionUtils.ensureNoSelfReferences(run);
+                    values[i].copyChars(run.toString());
                 }
                 sort();
                 return true;
@@ -20,6 +20,7 @@ package org.elasticsearch.search.aggregations.support.values;

 import org.apache.lucene.search.Scorer;
 import org.elasticsearch.common.lucene.ScorerAware;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
 import org.elasticsearch.index.fielddata.SortingBinaryDocValues;
 import org.elasticsearch.script.SearchScript;
@@ -44,6 +45,7 @@ public class ScriptBytesValues extends SortingBinaryDocValues implements ScorerA
             if (o == null) {
                 values[i].clear();
             } else {
+                CollectionUtils.ensureNoSelfReferences(o);
                 values[i].copyChars(o.toString());
             }
         }
@@ -22,6 +22,7 @@ import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.elasticsearch.common.document.DocumentField;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.script.SearchScript;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.fetch.FetchSubPhase;
@@ -64,6 +65,7 @@ public final class ScriptFieldsFetchSubPhase implements FetchSubPhase {
                 final Object value;
                 try {
                     value = leafScripts[i].run();
+                    CollectionUtils.ensureNoSelfReferences(value);
                 } catch (RuntimeException e) {
                     if (scriptFields.get(i).ignoreException()) {
                         continue;
@@ -32,6 +32,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.logging.DeprecationLogger;
 import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.ConstructingObjectParser;
 import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -341,7 +342,9 @@ public class ScriptSortBuilder extends SortBuilder<ScriptSortBuilder> {
                     }
                     @Override
                     public BytesRef binaryValue() {
-                        spare.copyChars(leafScript.run().toString());
+                        final Object run = leafScript.run();
+                        CollectionUtils.ensureNoSelfReferences(run);
+                        spare.copyChars(run.toString());
                         return spare.get();
                     }
                 };
@@ -115,7 +115,7 @@ public class CreateIndexRequestTests extends ESTestCase {
         final XContentType xContentType = randomFrom(XContentType.values());
         BytesReference originalBytes = toShuffledXContent(createIndexRequest, xContentType, EMPTY_PARAMS, humanReadable);

-        CreateIndexRequest parsedCreateIndexRequest = new CreateIndexRequest(createIndexRequest.index());
+        CreateIndexRequest parsedCreateIndexRequest = new CreateIndexRequest();
         parsedCreateIndexRequest.source(originalBytes, xContentType);

         assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings());
@@ -201,7 +201,7 @@ public class CreateIndexRequestTests extends ESTestCase {
         return builder;
     }

-    private static void randomMappingFields(XContentBuilder builder, boolean allowObjectField) throws IOException {
+    public static void randomMappingFields(XContentBuilder builder, boolean allowObjectField) throws IOException {
         builder.startObject("properties");

         int fieldsNo = randomIntBetween(0, 5);
@@ -21,17 +21,26 @@ package org.elasticsearch.action.admin.indices.mapping.put;

 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.admin.indices.create.CreateIndexRequestTests;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.common.xcontent.yaml.YamlXContent;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;

+import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS;

 public class PutMappingRequestTests extends ESTestCase {

     public void testValidation() {
@@ -94,4 +103,79 @@ public class PutMappingRequestTests extends ESTestCase {
             }
         }
     }
+
+    public void testToXContent() throws IOException {
+        PutMappingRequest request = new PutMappingRequest("foo");
+        request.type("my_type");
+
+        XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
+        mapping.startObject("properties");
+        mapping.startObject("email");
+        mapping.field("type", "text");
+        mapping.endObject();
+        mapping.endObject();
+        mapping.endObject();
+        request.source(mapping);
+
+        String actualRequestBody = Strings.toString(request);
+        String expectedRequestBody = "{\"properties\":{\"email\":{\"type\":\"text\"}}}";
+        assertEquals(expectedRequestBody, actualRequestBody);
+    }
+
+    public void testToXContentWithEmptySource() throws IOException {
+        PutMappingRequest request = new PutMappingRequest("foo");
+        request.type("my_type");
+
+        String actualRequestBody = Strings.toString(request);
+        String expectedRequestBody = "{}";
+        assertEquals(expectedRequestBody, actualRequestBody);
+    }
+
+    public void testToAndFromXContent() throws IOException {
+
+        final PutMappingRequest putMappingRequest = createTestItem();
+
+        boolean humanReadable = randomBoolean();
+        final XContentType xContentType = randomFrom(XContentType.values());
+        BytesReference originalBytes = toShuffledXContent(putMappingRequest, xContentType, EMPTY_PARAMS, humanReadable);
+
+        PutMappingRequest parsedPutMappingRequest = new PutMappingRequest();
+        parsedPutMappingRequest.source(originalBytes, xContentType);
+
+        assertMappingsEqual(putMappingRequest.source(), parsedPutMappingRequest.source());
+    }
+
+    private void assertMappingsEqual(String expected, String actual) throws IOException {
+
+        XContentParser expectedJson = createParser(XContentType.JSON.xContent(), expected);
+        XContentParser actualJson = createParser(XContentType.JSON.xContent(), actual);
+        assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered());
+    }
+
+    /**
+     * Returns a random {@link PutMappingRequest}.
+     */
+    private static PutMappingRequest createTestItem() throws IOException {
+        String index = randomAlphaOfLength(5);
+
+        PutMappingRequest request = new PutMappingRequest(index);
+
+        String type = randomAlphaOfLength(5);
+        request.type(type);
+        request.source(randomMapping());
+
+        return request;
+    }
+
+    private static XContentBuilder randomMapping() throws IOException {
+        XContentBuilder builder = XContentFactory.jsonBuilder();
+        builder.startObject();
+
+        if (randomBoolean()) {
+            CreateIndexRequestTests.randomMappingFields(builder, true);
+        }
+
+        builder.endObject();
+        return builder;
+    }
 }
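
Taken together with testToXContent above, building and sending a put-mapping request through the high-level client looks roughly like this. A hedged sketch (index, type and field names are made up for illustration; the client setup is assumed):

    // Build the mapping body with an XContentBuilder, exactly as the test does.
    PutMappingRequest request = new PutMappingRequest("authors");
    request.type("doc");
    XContentBuilder mapping = JsonXContent.contentBuilder().startObject();
    mapping.startObject("properties");
    mapping.startObject("name");
    mapping.field("type", "keyword");
    mapping.endObject();
    mapping.endObject();
    mapping.endObject();
    request.source(mapping);
    // Strings.toString(request) now renders {"properties":{"name":{"type":"keyword"}}},
    // which is the body shipped to the _mapping endpoint.
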
@@ -0,0 +1,85 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.admin.indices.mapping.put;
+
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+
+import java.io.IOException;
+
+import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
+
+public class PutMappingResponseTests extends ESTestCase {
+
+    public void testToXContent() {
+        PutMappingResponse response = new PutMappingResponse(true);
+        String output = Strings.toString(response);
+        assertEquals("{\"acknowledged\":true}", output);
+    }
+
+    public void testToAndFromXContent() throws IOException {
+        doFromXContentTestWithRandomFields(false);
+    }
+
+    /**
+     * This test adds random fields and objects to the xContent rendered out to
+     * ensure we can parse it back to be forward compatible with additions to
+     * the xContent
+     */
+    public void testFromXContentWithRandomFields() throws IOException {
+        doFromXContentTestWithRandomFields(true);
+    }
+
+    private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws IOException {
+
+        final PutMappingResponse putMappingResponse = createTestItem();
+
+        boolean humanReadable = randomBoolean();
+        final XContentType xContentType = randomFrom(XContentType.values());
+        BytesReference originalBytes = toShuffledXContent(putMappingResponse, xContentType, ToXContent.EMPTY_PARAMS, humanReadable);
+
+        BytesReference mutated;
+        if (addRandomFields) {
+            mutated = insertRandomFields(xContentType, originalBytes, null, random());
+        } else {
+            mutated = originalBytes;
+        }
+        PutMappingResponse parsedPutMappingResponse;
+        try (XContentParser parser = createParser(xContentType.xContent(), mutated)) {
+            parsedPutMappingResponse = PutMappingResponse.fromXContent(parser);
+            assertNull(parser.nextToken());
+        }
+
+        assertEquals(putMappingResponse.isAcknowledged(), parsedPutMappingResponse.isAcknowledged());
+    }
+
+    /**
+     * Returns a random {@link PutMappingResponse}.
+     */
+    private static PutMappingResponse createTestItem() throws IOException {
+        boolean acknowledged = randomBoolean();
+
+        return new PutMappingResponse(acknowledged);
+    }
+}
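
The random-fields test above only passes if fromXContent tolerates fields it does not know. A minimal sketch of that style of lenient parsing (this is an assumed shape for illustration, not the actual PutMappingResponse.fromXContent implementation):

    // Pull out the one known field ("acknowledged") and skip everything else,
    // so responses gaining new fields in later versions still parse.
    static boolean parseAcknowledged(XContentParser parser) throws IOException {
        boolean acknowledged = false;
        String currentField = null;
        parser.nextToken();                       // step into the enclosing START_OBJECT
        XContentParser.Token token;
        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
            if (token == XContentParser.Token.FIELD_NAME) {
                currentField = parser.currentName();
            } else if (token.isValue()) {
                if ("acknowledged".equals(currentField)) {
                    acknowledged = parser.booleanValue();
                }                                 // unknown scalar fields are ignored
            } else {
                parser.skipChildren();            // unknown objects/arrays are skipped whole
            }
        }
        return acknowledged;
    }
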
@@ -570,20 +570,24 @@ public class IndexAliasesIT extends ESIntegTestCase {
         logger.info("--> getting alias1");
         GetAliasesResponse getResponse = admin().indices().prepareGetAliases("alias1").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(1));
+        assertThat(getResponse.getAliases().size(), equalTo(5));
         assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
         assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
+        assertTrue(getResponse.getAliases().get("test").isEmpty());
+        assertTrue(getResponse.getAliases().get("test123").isEmpty());
+        assertTrue(getResponse.getAliases().get("foobarbaz").isEmpty());
+        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         AliasesExistResponse existsResponse = admin().indices().prepareAliasesExist("alias1").get();
         assertThat(existsResponse.exists(), equalTo(true));

         logger.info("--> getting all aliases that start with alias*");
         getResponse = admin().indices().prepareGetAliases("alias*").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(1));
+        assertThat(getResponse.getAliases().size(), equalTo(5));
         assertThat(getResponse.getAliases().get("foobar").size(), equalTo(2));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
@@ -595,6 +599,10 @@ public class IndexAliasesIT extends ESIntegTestCase {
         assertThat(getResponse.getAliases().get("foobar").get(1).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(1).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(1).getSearchRouting(), nullValue());
+        assertTrue(getResponse.getAliases().get("test").isEmpty());
+        assertTrue(getResponse.getAliases().get("test123").isEmpty());
+        assertTrue(getResponse.getAliases().get("foobarbaz").isEmpty());
+        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         existsResponse = admin().indices().prepareAliasesExist("alias*").get();
         assertThat(existsResponse.exists(), equalTo(true));

@@ -679,12 +687,13 @@ public class IndexAliasesIT extends ESIntegTestCase {
         logger.info("--> getting f* for index *bar");
         getResponse = admin().indices().prepareGetAliases("f*").addIndices("*bar").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(1));
+        assertThat(getResponse.getAliases().size(), equalTo(2));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
         assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
+        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         existsResponse = admin().indices().prepareAliasesExist("f*")
                 .addIndices("*bar").get();
         assertThat(existsResponse.exists(), equalTo(true));
@@ -693,13 +702,14 @@ public class IndexAliasesIT extends ESIntegTestCase {
         logger.info("--> getting f* for index *bac");
         getResponse = admin().indices().prepareGetAliases("foo").addIndices("*bac").get();
         assertThat(getResponse, notNullValue());
-        assertThat(getResponse.getAliases().size(), equalTo(1));
+        assertThat(getResponse.getAliases().size(), equalTo(2));
         assertThat(getResponse.getAliases().get("foobar").size(), equalTo(1));
         assertThat(getResponse.getAliases().get("foobar").get(0), notNullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).alias(), equalTo("foo"));
         assertThat(getResponse.getAliases().get("foobar").get(0).getFilter(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getIndexRouting(), nullValue());
         assertThat(getResponse.getAliases().get("foobar").get(0).getSearchRouting(), nullValue());
+        assertTrue(getResponse.getAliases().get("bazbar").isEmpty());
         existsResponse = admin().indices().prepareAliasesExist("foo")
                 .addIndices("*bac").get();
         assertThat(existsResponse.exists(), equalTo(true));
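
To restate what the reverted behaviour means: a get-aliases call scoped to one alias again reports every index it was resolved against, and indices that do not carry the alias simply map to an empty list. A compact recap in the test's own style (index names as set up above):

    // One alias requested, five indices in scope: the response map has five
    // entries, only "foobar" of which actually holds alias metadata.
    GetAliasesResponse resp = admin().indices().prepareGetAliases("alias1").get();
    assertThat(resp.getAliases().get("foobar").get(0).alias(), equalTo("alias1"));
    assertTrue(resp.getAliases().get("test").isEmpty());   // in scope, but no alias1
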
@@ -21,11 +21,14 @@ package org.elasticsearch.cluster.allocation;

 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.cluster.health.ClusterHealthStatus;
 import org.elasticsearch.cluster.routing.IndexRoutingTable;
 import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
+import org.elasticsearch.cluster.routing.ShardRouting;
+import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.allocation.decider.FilterAllocationDecider;
 import org.elasticsearch.cluster.routing.allocation.decider.ThrottlingAllocationDecider;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
@@ -34,7 +37,9 @@ import org.elasticsearch.test.ESIntegTestCase;
 import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
 import org.elasticsearch.test.ESIntegTestCase.Scope;

+import java.util.HashSet;
 import java.util.List;
+import java.util.Set;

 import static org.hamcrest.Matchers.equalTo;

@@ -156,5 +161,58 @@ public class FilteringAllocationIT extends ESIntegTestCase {
             .execute().actionGet());
         assertEquals("invalid IP address [192.168.1.1.] for [" + filterSetting.getKey() + ipKey + "]", e.getMessage());
     }

+    public void testTransientSettingsStillApplied() throws Exception {
+        List<String> nodes = internalCluster().startNodes(6);
+        Set<String> excludeNodes = new HashSet<>(nodes.subList(0, 3));
+        Set<String> includeNodes = new HashSet<>(nodes.subList(3, 6));
+        logger.info("--> exclude: [{}], include: [{}]",
+            Strings.collectionToCommaDelimitedString(excludeNodes),
+            Strings.collectionToCommaDelimitedString(includeNodes));
+        ensureStableCluster(6);
+        client().admin().indices().prepareCreate("test").get();
+        ensureGreen("test");
+
+        Settings exclude = Settings.builder().put("cluster.routing.allocation.exclude._name",
+            Strings.collectionToCommaDelimitedString(excludeNodes)).build();
+
+        logger.info("--> updating settings");
+        client().admin().cluster().prepareUpdateSettings().setTransientSettings(exclude).get();
+
+        logger.info("--> waiting for relocation");
+        waitForRelocation(ClusterHealthStatus.GREEN);
+
+        ClusterState state = client().admin().cluster().prepareState().get().getState();
+
+        for (ShardRouting shard : state.getRoutingTable().shardsWithState(ShardRoutingState.STARTED)) {
+            String node = state.getRoutingNodes().node(shard.currentNodeId()).node().getName();
+            logger.info("--> shard on {} - {}", node, shard);
+            assertTrue("shard on " + node + " but should only be on the include node list: " +
+                Strings.collectionToCommaDelimitedString(includeNodes),
+                includeNodes.contains(node));
+        }
+
+        Settings other = Settings.builder().put("cluster.info.update.interval", "45s").build();
+
+        logger.info("--> updating settings with random persistent setting");
+        client().admin().cluster().prepareUpdateSettings()
+            .setPersistentSettings(other).setTransientSettings(exclude).get();
+
+        logger.info("--> waiting for relocation");
+        waitForRelocation(ClusterHealthStatus.GREEN);
+
+        state = client().admin().cluster().prepareState().get().getState();
+
+        // The transient settings still exist in the state
+        assertThat(state.metaData().transientSettings(), equalTo(exclude));
+
+        for (ShardRouting shard : state.getRoutingTable().shardsWithState(ShardRoutingState.STARTED)) {
+            String node = state.getRoutingNodes().node(shard.currentNodeId()).node().getName();
+            logger.info("--> shard on {} - {}", node, shard);
+            assertTrue("shard on " + node + " but should only be on the include node list: " +
+                Strings.collectionToCommaDelimitedString(includeNodes),
+                includeNodes.contains(node));
+        }
+    }
 }
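
The test drives allocation filtering through a transient cluster setting. A minimal sketch of the same operation in isolation (node names are illustrative; the client comes from the surrounding test harness):

    // Exclude three nodes by name; shards relocate off them. The transient
    // setting must then survive later updates that only touch persistent
    // settings - the regression the test above pins down.
    Settings exclude = Settings.builder()
        .put("cluster.routing.allocation.exclude._name", "node-0,node-1,node-2")
        .build();
    client().admin().cluster().prepareUpdateSettings().setTransientSettings(exclude).get();
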
@@ -261,6 +261,21 @@ public class ScopedSettingsTests extends ESTestCase {
         assertEquals(2, listResults.size());
         assertEquals(2, intResults.size());

+        service.applySettings(Settings.builder()
+            .put("foo.test.bar", 2)
+            .put("foo.test_1.bar", 7)
+            .putList("foo.test_list.list", "16", "17")
+            .putList("foo.test_list_1.list", "18", "19", "20")
+            .build());
+
+        assertEquals(2, intResults.get("test").intValue());
+        assertEquals(7, intResults.get("test_1").intValue());
+        assertEquals(Arrays.asList(16, 17), listResults.get("test_list"));
+        assertEquals(Arrays.asList(18, 19, 20), listResults.get("test_list_1"));
+        assertEquals(2, listResults.size());
+        assertEquals(2, intResults.size());
+

         listResults.clear();
         intResults.clear();

@@ -286,6 +301,35 @@ public class ScopedSettingsTests extends ESTestCase {

     }

+    public void testAffixMapConsumerNotCalledWithNull() {
+        Setting.AffixSetting<Integer> prefixSetting = Setting.prefixKeySetting("eggplant.",
+            (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope));
+        Setting.AffixSetting<Integer> otherSetting = Setting.prefixKeySetting("other.",
+            (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope));
+        AbstractScopedSettings service = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(prefixSetting, otherSetting)));
+        Map<String, Integer> affixResults = new HashMap<>();
+
+        Consumer<Map<String,Integer>> consumer = (map) -> {
+            logger.info("--> consuming settings {}", map);
+            affixResults.clear();
+            affixResults.putAll(map);
+        };
+        service.addAffixMapUpdateConsumer(prefixSetting, consumer, (s, k) -> {}, randomBoolean());
+        assertEquals(0, affixResults.size());
+        service.applySettings(Settings.builder()
+            .put("eggplant._name", 2)
+            .build());
+        assertThat(affixResults.size(), equalTo(1));
+        assertThat(affixResults.get("_name"), equalTo(2));
+
+        service.applySettings(Settings.builder()
+            .put("eggplant._name", 2)
+            .put("other.thing", 3)
+            .build());
+
+        assertThat(affixResults.get("_name"), equalTo(2));
+    }
+
     public void testApply() {
         Setting<Integer> testSetting = Setting.intSetting("foo.bar", 1, Property.Dynamic, Property.NodeScope);
         Setting<Integer> testSetting2 = Setting.intSetting("foo.bar.baz", 1, Property.Dynamic, Property.NodeScope);
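
For context on what the new test exercises: an affix (prefix-key) setting matches any key under its namespace, and the map handed to the consumer is keyed by the suffix after the prefix. A sketch under those assumptions (the fourth addAffixMapUpdateConsumer argument is passed through exactly as the test does, without relying on its meaning):

    // "eggplant.<anything>" keys match; the consumer sees {_name=2} when
    // "eggplant._name" changes, and is not re-notified for unrelated prefixes.
    Setting.AffixSetting<Integer> plants = Setting.prefixKeySetting("eggplant.",
        (k) -> Setting.intSetting(k, 1, Property.Dynamic, Property.NodeScope));
    ClusterSettings settings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(plants)));
    settings.addAffixMapUpdateConsumer(plants, map -> logger.info("--> updated {}", map), (s, k) -> {}, true);
    settings.applySettings(Settings.builder().put("eggplant._name", 2).build());  // logs {_name=2}
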
@@ -25,16 +25,21 @@ import org.apache.lucene.util.BytesRefBuilder;
 import org.apache.lucene.util.Counter;
 import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 import java.util.SortedSet;
 import java.util.TreeSet;

+import static java.util.Collections.emptyMap;
 import static org.elasticsearch.common.util.CollectionUtils.eagerPartition;
+import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
@@ -176,4 +181,15 @@ public class CollectionUtilsTests extends ESTestCase {
             eagerPartition(Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12), 6)
         );
     }
+
+    public void testEnsureNoSelfReferences() {
+        CollectionUtils.ensureNoSelfReferences(emptyMap());
+        CollectionUtils.ensureNoSelfReferences(null);
+
+        Map<String, Object> map = new HashMap<>();
+        map.put("field", map);
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> CollectionUtils.ensureNoSelfReferences(map));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
+    }
 }
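
A sketch of cycle detection in the spirit of what this test pins down: walk nested maps, iterables and arrays while tracking ancestors by identity, and fail on revisiting one. Method names are illustrative; this is not the actual CollectionUtils implementation:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.IdentityHashMap;
    import java.util.Map;
    import java.util.Set;

    final class SelfReferenceCheck {
        static void ensureNoCycles(Object value) {
            ensureNoCycles(value, Collections.newSetFromMap(new IdentityHashMap<>()));
        }

        private static void ensureNoCycles(Object value, Set<Object> ancestors) {
            Iterable<?> children = null;
            if (value instanceof Map) {
                children = ((Map<?, ?>) value).values();
            } else if (value instanceof Iterable) {
                children = (Iterable<?>) value;
            } else if (value instanceof Object[]) {
                children = Arrays.asList((Object[]) value);
            }
            if (children != null) {
                // identity, not equals(): two equal but distinct lists are fine
                if (ancestors.add(value) == false) {
                    throw new IllegalArgumentException("Iterable object is self-referencing itself");
                }
                for (Object child : children) {
                    ensureNoCycles(child, ancestors);
                }
                // siblings may legitimately share sub-objects, so pop on the way out
                ancestors.remove(value);
            }
        }
    }
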
@@ -35,6 +35,7 @@ import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.text.Text;
 import org.elasticsearch.common.unit.DistanceUnit;
+import org.elasticsearch.common.util.CollectionUtils;
 import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.test.ESTestCase;
 import org.hamcrest.Matcher;
@@ -854,19 +855,19 @@ public abstract class BaseXContentTestCase extends ESTestCase {
     }

     public void testEnsureNoSelfReferences() throws IOException {
-        XContentBuilder.ensureNoSelfReferences(emptyMap());
-        XContentBuilder.ensureNoSelfReferences(null);
+        CollectionUtils.ensureNoSelfReferences(emptyMap());
+        CollectionUtils.ensureNoSelfReferences(null);

         Map<String, Object> map = new HashMap<>();
         map.put("field", map);

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder().map(map));
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }

     /**
      * Test that the same map written multiple times do not trigger the self-reference check in
-     * {@link XContentBuilder#ensureNoSelfReferences(Object)}
+     * {@link CollectionUtils#ensureNoSelfReferences(Object)}
      */
     public void testRepeatedMapsAndNoSelfReferences() throws Exception {
         Map<String, Object> mapB = singletonMap("b", "B");
@@ -899,7 +900,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         map1.put("map0", map0); // map 1 -> map 0 loop

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder().map(map0));
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }

     public void testSelfReferencingMapsTwoLevels() throws IOException {
@@ -917,7 +918,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         map2.put("map0", map0); // map 2 -> map 0 loop

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder().map(map0));
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }

     public void testSelfReferencingObjectsArray() throws IOException {
@@ -930,13 +931,13 @@ public abstract class BaseXContentTestCase extends ESTestCase {
                 .startObject()
                 .field("field", values)
                 .endObject());
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));

         e = expectThrows(IllegalArgumentException.class, () -> builder()
                 .startObject()
                 .array("field", values)
                 .endObject());
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }

     public void testSelfReferencingIterable() throws IOException {
@@ -949,7 +950,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
                 .startObject()
                 .field("field", (Iterable) values)
                 .endObject());
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }

     public void testSelfReferencingIterableOneLevel() throws IOException {
@@ -964,7 +965,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
                 .startObject()
                 .field("field", (Iterable) values)
                 .endObject());
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }

     public void testSelfReferencingIterableTwoLevels() throws IOException {
@@ -984,7 +985,7 @@ public abstract class BaseXContentTestCase extends ESTestCase {
         map2.put("map0", map0); // map 2 -> map 0 loop

         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder().map(map0));
-        assertThat(e.getMessage(), containsString("Object has already been built and is self-referencing itself"));
+        assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself"));
     }

     public void testChecksForDuplicates() throws Exception {
@@ -65,13 +65,13 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
         switch (randomIntBetween(0, 2)) {
             case 0:
                 // use mapped integer field for numeric range queries
-                query = new RangeQueryBuilder(INT_FIELD_NAME);
+                query = new RangeQueryBuilder(randomBoolean() ? INT_FIELD_NAME : INT_RANGE_FIELD_NAME);
                 query.from(randomIntBetween(1, 100));
                 query.to(randomIntBetween(101, 200));
                 break;
             case 1:
                 // use mapped date field, using date string representation
-                query = new RangeQueryBuilder(DATE_FIELD_NAME);
+                query = new RangeQueryBuilder(randomBoolean() ? DATE_FIELD_NAME : DATE_RANGE_FIELD_NAME);
                 query.from(new DateTime(System.currentTimeMillis() - randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                 query.to(new DateTime(System.currentTimeMillis() + randomIntBetween(0, 1000000), DateTimeZone.UTC).toString());
                 // Create timestamp option only then we have a date mapper,
@@ -99,6 +99,10 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
         if (randomBoolean()) {
             query.to(null);
         }
+        if (query.fieldName().equals(INT_RANGE_FIELD_NAME) || query.fieldName().equals(DATE_RANGE_FIELD_NAME)) {
+            query.relation(
+                randomFrom(ShapeRelation.CONTAINS.toString(), ShapeRelation.INTERSECTS.toString(), ShapeRelation.WITHIN.toString()));
+        }
         return query;
     }

@@ -143,7 +147,9 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil

         } else if (getCurrentTypes().length == 0 ||
             (queryBuilder.fieldName().equals(DATE_FIELD_NAME) == false
-                && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false)) {
+                && queryBuilder.fieldName().equals(INT_FIELD_NAME) == false
+                && queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) == false
+                && queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME) == false)) {
             assertThat(query, instanceOf(TermRangeQuery.class));
             TermRangeQuery termRangeQuery = (TermRangeQuery) query;
             assertThat(termRangeQuery.getField(), equalTo(queryBuilder.fieldName()));
@@ -219,6 +225,8 @@ public class RangeQueryBuilderTests extends AbstractQueryTestCase<RangeQueryBuil
                     maxInt--;
                 }
             }
+        } else if (queryBuilder.fieldName().equals(DATE_RANGE_FIELD_NAME) || queryBuilder.fieldName().equals(INT_RANGE_FIELD_NAME)) {
+            // todo can't check RangeFieldQuery because its currently package private (this will change)
         } else {
             throw new UnsupportedOperationException();
         }
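
On range-typed fields the query gains a relation, which is what the randomized setup above now exercises. A small sketch of the three relations (field name as mapped in these tests):

    // WITHIN matches documents whose stored range lies entirely inside [5, 15];
    // CONTAINS matches documents whose stored range encloses [5, 15];
    // INTERSECTS matches on any overlap at all.
    RangeQueryBuilder q = new RangeQueryBuilder(INT_RANGE_FIELD_NAME)
        .from(5).to(15)
        .relation(ShapeRelation.WITHIN.toString());
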
@@ -19,12 +19,12 @@

 package org.elasticsearch.index.query;

-import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.PointInSetQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermInSetQuery;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.action.get.GetRequest;
@@ -77,9 +77,8 @@ public class TermsQueryBuilderTests extends AbstractQueryTestCase<TermsQueryBuil
         if (randomBoolean()) {
             // make between 0 and 5 different values of the same type
             String fieldName;
-            do {
-                fieldName = getRandomFieldName();
-            } while (fieldName.equals(GEO_POINT_FIELD_NAME) || fieldName.equals(GEO_SHAPE_FIELD_NAME));
+            fieldName = randomValueOtherThanMany(choice -> choice.equals(GEO_POINT_FIELD_NAME) || choice.equals(GEO_SHAPE_FIELD_NAME)
+                || choice.equals(INT_RANGE_FIELD_NAME) || choice.equals(DATE_RANGE_FIELD_NAME), () -> getRandomFieldName());
             Object[] values = new Object[randomInt(5)];
             for (int i = 0; i < values.length; i++) {
                 values[i] = getRandomValueForFieldName(fieldName);
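
The helper replacing the hand-rolled do/while presumably has the following contract (a sketch of the assumed semantics; the real helper lives in the test framework, not here):

    import java.util.function.Predicate;
    import java.util.function.Supplier;

    // Keep drawing from the generator until the rejection predicate stops matching.
    static <T> T randomValueOtherThanMany(Predicate<T> rejected, Supplier<T> generator) {
        T value;
        do {
            value = generator.get();
        } while (rejected.test(value));
        return value;
    }
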
@@ -39,6 +39,7 @@ import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.NumericUtils;
 import org.apache.lucene.util.TestUtil;
+import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexSettings;
@@ -68,6 +69,9 @@ import java.util.Map;
 import java.util.function.Consumer;
 import java.util.function.Supplier;

+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.instanceOf;
+
 public class CompositeAggregatorTests extends AggregatorTestCase {
     private static MappedFieldType[] FIELD_TYPES;

@@ -761,6 +765,89 @@ public class CompositeAggregatorTests extends AggregatorTestCase {
         );
     }

+    public void testWithDateHistogramAndFormat() throws IOException {
+        final List<Map<String, List<Object>>> dataset = new ArrayList<>();
+        dataset.addAll(
+            Arrays.asList(
+                createDocument("date", asLong("2017-10-20T03:08:45")),
+                createDocument("date", asLong("2016-09-20T09:00:34")),
+                createDocument("date", asLong("2016-09-20T11:34:00")),
+                createDocument("date", asLong("2017-10-20T06:09:24")),
+                createDocument("date", asLong("2017-10-19T06:09:24")),
+                createDocument("long", 4L)
+            )
+        );
+        final Sort sort = new Sort(new SortedNumericSortField("date", SortField.Type.LONG));
+        testSearchCase(new MatchAllDocsQuery(), sort, dataset,
+            () -> {
+                DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
+                    .field("date")
+                    .dateHistogramInterval(DateHistogramInterval.days(1))
+                    .format("yyyy-MM-dd");
+                return new CompositeAggregationBuilder("name", Collections.singletonList(histo));
+            },
+            (result) -> {
+                assertEquals(3, result.getBuckets().size());
+                assertEquals("{date=2016-09-20}", result.getBuckets().get(0).getKeyAsString());
+                assertEquals(2L, result.getBuckets().get(0).getDocCount());
+                assertEquals("{date=2017-10-19}", result.getBuckets().get(1).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(1).getDocCount());
+                assertEquals("{date=2017-10-20}", result.getBuckets().get(2).getKeyAsString());
+                assertEquals(2L, result.getBuckets().get(2).getDocCount());
+            }
+        );
+
+        testSearchCase(new MatchAllDocsQuery(), sort, dataset,
+            () -> {
+                DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
+                    .field("date")
+                    .dateHistogramInterval(DateHistogramInterval.days(1))
+                    .format("yyyy-MM-dd");
+                return new CompositeAggregationBuilder("name", Collections.singletonList(histo))
+                    .aggregateAfter(createAfterKey("date", "2016-09-20"));
+
+            }, (result) -> {
+                assertEquals(2, result.getBuckets().size());
+                assertEquals("{date=2017-10-19}", result.getBuckets().get(0).getKeyAsString());
+                assertEquals(1L, result.getBuckets().get(0).getDocCount());
+                assertEquals("{date=2017-10-20}", result.getBuckets().get(1).getKeyAsString());
+                assertEquals(2L, result.getBuckets().get(1).getDocCount());
+            }
+        );
+    }
+
+    public void testThatDateHistogramFailsFormatAfter() throws IOException {
+        ElasticsearchParseException exc = expectThrows(ElasticsearchParseException.class,
+            () -> testSearchCase(new MatchAllDocsQuery(), null, Collections.emptyList(),
+                () -> {
+                    DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
+                        .field("date")
+                        .dateHistogramInterval(DateHistogramInterval.days(1))
+                        .format("yyyy-MM-dd");
+                    return new CompositeAggregationBuilder("name", Collections.singletonList(histo))
+                        .aggregateAfter(createAfterKey("date", "now"));
+                },
+                (result) -> {}
+            ));
+        assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class));
+        assertThat(exc.getCause().getMessage(), containsString("now() is not supported in [after] key"));
+
+        exc = expectThrows(ElasticsearchParseException.class,
+            () -> testSearchCase(new MatchAllDocsQuery(), null, Collections.emptyList(),
+                () -> {
+                    DateHistogramValuesSourceBuilder histo = new DateHistogramValuesSourceBuilder("date")
+                        .field("date")
+                        .dateHistogramInterval(DateHistogramInterval.days(1))
+                        .format("yyyy-MM-dd");
+                    return new CompositeAggregationBuilder("name", Collections.singletonList(histo))
+                        .aggregateAfter(createAfterKey("date", "1474329600000"));
+                },
+                (result) -> {}
+            ));
+        assertThat(exc.getCause(), instanceOf(IllegalArgumentException.class));
+        assertThat(exc.getCause().getMessage(), containsString("Parse failure"));
+    }
+
     public void testWithDateHistogramAndTimeZone() throws IOException {
         final List<Map<String, List<Object>>> dataset = new ArrayList<>();
         dataset.addAll(
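
Worth noting from the second and third cases above: once a format is set, the after key must be supplied in that same format. Formatted strings page correctly, while raw epoch millis now fail to parse. A compact sketch of valid paging, reusing the createAfterKey helper from this test:

    // Page past the "2016-09-20" bucket; passing "1474329600000" here would
    // throw, since the source now parses the after key with "yyyy-MM-dd".
    CompositeAggregationBuilder page2 =
        new CompositeAggregationBuilder("name", Collections.singletonList(
            new DateHistogramValuesSourceBuilder("date")
                .field("date")
                .dateHistogramInterval(DateHistogramInterval.days(1))
                .format("yyyy-MM-dd")))
        .aggregateAfter(createAfterKey("date", "2016-09-20"));
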
@@ -21,12 +21,15 @@ package org.elasticsearch.search.aggregations.bucket.composite;
 
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.joda.Joda;
 import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.aggregations.InternalAggregation;
 import org.elasticsearch.search.aggregations.InternalAggregations;
 import org.elasticsearch.search.aggregations.ParsedAggregation;
 import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
 import org.elasticsearch.test.InternalMultiBucketAggregationTestCase;
+import org.joda.time.DateTimeZone;
 import org.junit.After;
 
 import java.io.IOException;
@@ -41,28 +44,45 @@ import java.util.TreeSet;
 import java.util.stream.Collectors;
 
 import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiLettersOfLengthBetween;
+import static com.carrotsearch.randomizedtesting.RandomizedTest.randomLongBetween;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.lessThanOrEqualTo;
 
 public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase<InternalComposite> {
     private List<String> sourceNames;
+    private List<DocValueFormat> formats;
     private int[] reverseMuls;
-    private int[] formats;
+    private int[] types;
     private int size;
 
+    private static DocValueFormat randomDocValueFormat(boolean isLong) {
+        if (isLong) {
+            // we use specific format only for date histogram on a long/date field
+            if (randomBoolean()) {
+                return new DocValueFormat.DateTime(Joda.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
+            } else {
+                return DocValueFormat.RAW;
+            }
+        } else {
+            // and the raw format for the other types
+            return DocValueFormat.RAW;
+        }
+    }
+
     @Override
     public void setUp() throws Exception {
         super.setUp();
         int numFields = randomIntBetween(1, 10);
         size = randomNumberOfBuckets();
         sourceNames = new ArrayList<>();
+        formats = new ArrayList<>();
         reverseMuls = new int[numFields];
-        formats = new int[numFields];
+        types = new int[numFields];
         for (int i = 0; i < numFields; i++) {
             sourceNames.add("field_" + i);
             reverseMuls[i] = randomBoolean() ? 1 : -1;
-            formats[i] = randomIntBetween(0, 2);
+            int type = randomIntBetween(0, 2);
+            types[i] = type;
+            formats.add(randomDocValueFormat(type == 0));
         }
     }
 
@@ -70,9 +90,10 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase<InternalComposite> {
     @After
     public void tearDown() throws Exception {
         super.tearDown();
-        sourceNames= null;
-        reverseMuls = null;
+        sourceNames = null;
         formats = null;
+        reverseMuls = null;
+        types = null;
     }
 
     @Override
@@ -93,7 +114,7 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase<InternalComposite> {
     private CompositeKey createCompositeKey() {
         Comparable<?>[] keys = new Comparable<?>[sourceNames.size()];
         for (int j = 0; j < keys.length; j++) {
-            switch (formats[j]) {
+            switch (types[j]) {
                 case 0:
                     keys[j] = randomLong();
                     break;
@@ -123,19 +144,6 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase<InternalComposite> {
         };
     }
 
-    @SuppressWarnings("unchecked")
-    private Comparator<InternalComposite.InternalBucket> getBucketComparator() {
-        return (o1, o2) -> {
-            for (int i = 0; i < o1.getRawKey().size(); i++) {
-                int cmp = ((Comparable) o1.getRawKey().get(i)).compareTo(o2.getRawKey().get(i)) * reverseMuls[i];
-                if (cmp != 0) {
-                    return cmp;
-                }
-            }
-            return 0;
-        };
-    }
-
     @Override
     protected InternalComposite createTestInstance(String name, List<PipelineAggregator> pipelineAggregators,
                                                    Map<String, Object> metaData, InternalAggregations aggregations) {
@@ -149,11 +157,11 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase<InternalComposite> {
             }
             keys.add(key);
             InternalComposite.InternalBucket bucket =
-                new InternalComposite.InternalBucket(sourceNames, key, reverseMuls, 1L, aggregations);
+                new InternalComposite.InternalBucket(sourceNames, formats, key, reverseMuls, 1L, aggregations);
             buckets.add(bucket);
         }
         Collections.sort(buckets, (o1, o2) -> o1.compareKey(o2));
-        return new InternalComposite(name, size, sourceNames, buckets, reverseMuls, Collections.emptyList(), metaData);
+        return new InternalComposite(name, size, sourceNames, formats, buckets, reverseMuls, Collections.emptyList(), metaData);
     }
 
     @Override
@@ -172,7 +180,7 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase<InternalComposite> {
                 break;
             case 1:
                 buckets = new ArrayList<>(buckets);
-                buckets.add(new InternalComposite.InternalBucket(sourceNames, createCompositeKey(), reverseMuls,
+                buckets.add(new InternalComposite.InternalBucket(sourceNames, formats, createCompositeKey(), reverseMuls,
                     randomLongBetween(1, 100), InternalAggregations.EMPTY)
                 );
                 break;
@@ -187,7 +195,7 @@ public class InternalCompositeTests extends InternalMultiBucketAggregationTestCase<InternalComposite> {
             default:
                 throw new AssertionError("illegal branch");
         }
-        return new InternalComposite(instance.getName(), instance.getSize(), sourceNames, buckets, reverseMuls,
+        return new InternalComposite(instance.getName(), instance.getSize(), sourceNames, formats, buckets, reverseMuls,
            instance.pipelineAggregators(), metaData);
    }

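Note on the hunks above: InternalComposite now carries one DocValueFormat per source, parallel to sourceNames, so a date-based composite key can be rendered with its configured format while other sources stay raw. A rough sketch (mine, not part of the diff; the timestamp and printed output are assumptions) of what the format built by randomDocValueFormat does to a long key:

    // Same construction as in randomDocValueFormat above: epoch_second pattern, UTC+1.
    DocValueFormat dateFormat = new DocValueFormat.DateTime(
            Joda.forPattern("epoch_second"), DateTimeZone.forOffsetHours(1));
    // Renders a millis-since-epoch key as epoch seconds; the output shown is assumed.
    String rendered = dateFormat.format(1516000000000L); // -> "1516000000"
    // The non-date sources get DocValueFormat.RAW, which prints the value unchanged.
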
@@ -36,6 +36,7 @@ import org.elasticsearch.index.query.MatchQueryBuilder;
 import org.elasticsearch.index.query.MultiMatchQueryBuilder;
 import org.elasticsearch.index.query.Operator;
 import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.RangeQueryBuilder;
 import org.elasticsearch.index.query.TermQueryBuilder;
 import org.elasticsearch.index.query.WrapperQueryBuilder;
 import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders;
@@ -1893,4 +1894,17 @@ public class SearchQueryIT extends ESIntegTestCase {
         }
     }
 
+    public void testRangeQueryRangeFields_24744() throws Exception {
+        assertAcked(prepareCreate("test").addMapping("type1", "int_range", "type=integer_range"));
+
+        client().prepareIndex("test", "type1", "1")
+            .setSource(jsonBuilder().startObject().startObject("int_range").field("gte", 10).field("lte", 20).endObject().endObject())
+            .get();
+        refresh();
+
+        RangeQueryBuilder range = new RangeQueryBuilder("int_range").relation("intersects").from(Integer.MIN_VALUE).to(Integer.MAX_VALUE);
+        SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get();
+        assertHitCount(searchResponse, 1);
+    }
+
 }
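The new test pins down the #24744 regression: a range query against a range-mapped field must honor its relation. As a hedged sketch (not in the diff), the other two relations RangeQueryBuilder accepts would behave as follows against the document indexed above:

    // "within": the indexed range [10, 20] must lie entirely inside the query bounds.
    RangeQueryBuilder within = new RangeQueryBuilder("int_range").relation("within").from(5).to(30);
    assertHitCount(client().prepareSearch("test").setQuery(within).get(), 1);

    // "contains": the indexed range must fully enclose the query bounds.
    RangeQueryBuilder contains = new RangeQueryBuilder("int_range").relation("contains").from(12).to(18);
    assertHitCount(client().prepareSearch("test").setQuery(contains).get(), 1);
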
@@ -138,17 +138,19 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
     public static final String STRING_FIELD_NAME = "mapped_string";
     protected static final String STRING_FIELD_NAME_2 = "mapped_string_2";
     protected static final String INT_FIELD_NAME = "mapped_int";
+    protected static final String INT_RANGE_FIELD_NAME = "mapped_int_range";
     protected static final String DOUBLE_FIELD_NAME = "mapped_double";
     protected static final String BOOLEAN_FIELD_NAME = "mapped_boolean";
     protected static final String DATE_FIELD_NAME = "mapped_date";
+    protected static final String DATE_RANGE_FIELD_NAME = "mapped_date_range";
     protected static final String OBJECT_FIELD_NAME = "mapped_object";
     protected static final String GEO_POINT_FIELD_NAME = "mapped_geo_point";
     protected static final String GEO_SHAPE_FIELD_NAME = "mapped_geo_shape";
-    protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME,
-        DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME,
+    protected static final String[] MAPPED_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME,
+        DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, OBJECT_FIELD_NAME, GEO_POINT_FIELD_NAME,
         GEO_SHAPE_FIELD_NAME};
-    private static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME,
-        DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, GEO_POINT_FIELD_NAME, };
+    private static final String[] MAPPED_LEAF_FIELD_NAMES = new String[]{STRING_FIELD_NAME, INT_FIELD_NAME, INT_RANGE_FIELD_NAME,
+        DOUBLE_FIELD_NAME, BOOLEAN_FIELD_NAME, DATE_FIELD_NAME, DATE_RANGE_FIELD_NAME, GEO_POINT_FIELD_NAME, };
     private static final int NUMBER_OF_TESTQUERIES = 20;
 
     protected static Version indexVersionCreated;
@@ -1077,9 +1079,11 @@ public abstract class AbstractQueryTestCase<QB extends AbstractQueryBuilder<QB>>
             STRING_FIELD_NAME, "type=text",
             STRING_FIELD_NAME_2, "type=keyword",
             INT_FIELD_NAME, "type=integer",
+            INT_RANGE_FIELD_NAME, "type=integer_range",
             DOUBLE_FIELD_NAME, "type=double",
             BOOLEAN_FIELD_NAME, "type=boolean",
             DATE_FIELD_NAME, "type=date",
+            DATE_RANGE_FIELD_NAME, "type=date_range",
             OBJECT_FIELD_NAME, "type=object",
             GEO_POINT_FIELD_NAME, "type=geo_point",
             GEO_SHAPE_FIELD_NAME, "type=geo_shape"
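With integer_range and date_range wired into the default test mapping above, query-builder tests can draw the range fields like any other mapped leaf field. A small hypothetical fragment (randomFrom is the usual ESTestCase helper; the relation choice is an assumption, since range fields need one to match):

    // Pick one of the newly mapped range fields and build a query valid against it.
    String field = randomFrom(INT_RANGE_FIELD_NAME, DATE_RANGE_FIELD_NAME);
    RangeQueryBuilder query = new RangeQueryBuilder(field).relation("intersects");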