diff --git a/.ci/java-versions.properties b/.ci/java-versions.properties
index a0713ce128e..9202be97bd9 100644
--- a/.ci/java-versions.properties
+++ b/.ci/java-versions.properties
@@ -4,5 +4,5 @@
# build and test Elasticsearch for this branch. Valid Java versions
# are 'java' or 'openjdk' followed by the major release number.
-ES_BUILD_JAVA=java10
+ES_BUILD_JAVA=java11
ES_RUNTIME_JAVA=java8
diff --git a/.ci/matrix-build-javas.yml b/.ci/matrix-build-javas.yml
index bbb61b8eb6d..debb074fce1 100644
--- a/.ci/matrix-build-javas.yml
+++ b/.ci/matrix-build-javas.yml
@@ -6,5 +6,4 @@
# or 'openjdk' followed by the major release number.
ES_BUILD_JAVA:
- - java10
- java11
diff --git a/.ci/matrix-runtime-javas.yml b/.ci/matrix-runtime-javas.yml
index ca7db3b5513..8025ac237db 100644
--- a/.ci/matrix-runtime-javas.yml
+++ b/.ci/matrix-runtime-javas.yml
@@ -8,5 +8,4 @@
ES_RUNTIME_JAVA:
- java8
- java8fips
- - java10
- java11
diff --git a/buildSrc/src/main/resources/minimumCompilerVersion b/buildSrc/src/main/resources/minimumCompilerVersion
index 578c71bd558..b8162070734 100644
--- a/buildSrc/src/main/resources/minimumCompilerVersion
+++ b/buildSrc/src/main/resources/minimumCompilerVersion
@@ -1 +1 @@
-1.10
\ No newline at end of file
+1.11
\ No newline at end of file
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java
index b1a3eb3f87b..c182cc27e84 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherClient.java
@@ -19,6 +19,8 @@
package org.elasticsearch.client;
import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.watcher.AckWatchRequest;
+import org.elasticsearch.client.watcher.AckWatchResponse;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
@@ -91,4 +93,32 @@ public final class WatcherClient {
restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::deleteWatch, options,
DeleteWatchResponse::fromXContent, listener, singleton(404));
}
+
+ /**
+ * Acknowledges a watch.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-ack-watch.html">
+     * the docs</a> for more information.
+ * @param request the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @return the response
+ * @throws IOException if there is a problem sending the request or parsing back the response
+ */
+ public AckWatchResponse ackWatch(AckWatchRequest request, RequestOptions options) throws IOException {
+ return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::ackWatch, options,
+ AckWatchResponse::fromXContent, emptySet());
+ }
+
+ /**
+ * Asynchronously acknowledges a watch.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-ack-watch.html">
+     * the docs</a> for more information.
+ * @param request the request
+ * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+ * @param listener the listener to be notified upon completion of the request
+ */
+    public void ackWatchAsync(AckWatchRequest request, RequestOptions options, ActionListener<AckWatchResponse> listener) {
+ restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::ackWatch, options,
+ AckWatchResponse::fromXContent, listener, emptySet());
+ }
+
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java
index 3b52d1c7b99..7a8fa19633e 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/WatcherRequestConverters.java
@@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
import org.apache.http.entity.ByteArrayEntity;
import org.apache.http.entity.ContentType;
+import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
@@ -59,4 +60,17 @@ public class WatcherRequestConverters {
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
return request;
}
+
+ public static Request ackWatch(AckWatchRequest ackWatchRequest) {
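+        // builds an endpoint of the form /_xpack/watcher/watch/{watchId}/_ack,
+        // with an optional trailing comma-separated list of action IDs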
+ String endpoint = new RequestConverters.EndpointBuilder()
+ .addPathPartAsIs("_xpack")
+ .addPathPartAsIs("watcher")
+ .addPathPartAsIs("watch")
+ .addPathPart(ackWatchRequest.getWatchId())
+ .addPathPartAsIs("_ack")
+ .addCommaSeparatedPathParts(ackWatchRequest.getActionIds())
+ .build();
+ Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+ return request;
+ }
}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/AckWatchRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/AckWatchRequest.java
new file mode 100644
index 00000000000..1381544744d
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/AckWatchRequest.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.watcher;
+
+import org.elasticsearch.client.Validatable;
+import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
+
+import java.util.Locale;
+
+/**
+ * A request to explicitly acknowledge a watch.
+ */
+public class AckWatchRequest implements Validatable {
+
+ private final String watchId;
+ private final String[] actionIds;
+
+ public AckWatchRequest(String watchId, String... actionIds) {
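+        // validate eagerly so that an invalid watch or action ID fails fast with a
+        // ValidationException instead of surfacing later through Validatable#validate()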
+ validateIds(watchId, actionIds);
+ this.watchId = watchId;
+        this.actionIds = actionIds == null ? new String[0] : actionIds;
+ }
+
+ private void validateIds(String watchId, String... actionIds) {
+ ValidationException exception = new ValidationException();
+ if (watchId == null) {
+ exception.addValidationError("watch id is missing");
+ } else if (PutWatchRequest.isValidId(watchId) == false) {
+ exception.addValidationError("watch id contains whitespace");
+ }
+
+ if (actionIds != null) {
+ for (String actionId : actionIds) {
+ if (actionId == null) {
+                    exception.addValidationError("action id may not be null");
+ } else if (PutWatchRequest.isValidId(actionId) == false) {
+ exception.addValidationError(
+ String.format(Locale.ROOT, "action id [%s] contains whitespace", actionId));
+ }
+ }
+ }
+
+ if (!exception.validationErrors().isEmpty()) {
+ throw exception;
+ }
+ }
+
+ /**
+ * @return The ID of the watch to be acked.
+ */
+ public String getWatchId() {
+ return watchId;
+ }
+
+ /**
+ * @return The IDs of the actions to be acked. If omitted,
+ * all actions for the given watch will be acknowledged.
+ */
+ public String[] getActionIds() {
+ return actionIds;
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder sb = new StringBuilder("ack [").append(watchId).append("]");
+ if (actionIds.length > 0) {
+ sb.append("[");
+ for (int i = 0; i < actionIds.length; i++) {
+ if (i > 0) {
+ sb.append(", ");
+ }
+ sb.append(actionIds[i]);
+ }
+ sb.append("]");
+ }
+ return sb.toString();
+ }
+}
diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/AckWatchResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/AckWatchResponse.java
new file mode 100644
index 00000000000..5c6750193a7
--- /dev/null
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/watcher/AckWatchResponse.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.watcher;
+
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.xcontent.ConstructingObjectParser;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+
+/**
+ * The response from an 'ack watch' request.
+ */
+public class AckWatchResponse {
+
+ private final WatchStatus status;
+
+ public AckWatchResponse(WatchStatus status) {
+ this.status = status;
+ }
+
+ /**
+ * @return the status of the requested watch. If an action was
+ * successfully acknowledged, this will be reflected in its status.
+ */
+ public WatchStatus getStatus() {
+ return status;
+ }
+
+ private static final ParseField STATUS_FIELD = new ParseField("status");
+    private static final ConstructingObjectParser<AckWatchResponse, Void> PARSER =
+        new ConstructingObjectParser<>("ack_watch_response", true,
+            a -> new AckWatchResponse((WatchStatus) a[0]));
+
+ static {
+ PARSER.declareObject(ConstructingObjectParser.constructorArg(),
+ (parser, context) -> WatchStatus.parse(parser),
+ STATUS_FIELD);
+ }
+
+ public static AckWatchResponse fromXContent(XContentParser parser) throws IOException {
+ return PARSER.parse(parser, null);
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java
index 491992735af..4964fc4be50 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherIT.java
@@ -18,6 +18,11 @@
*/
package org.elasticsearch.client;
+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.client.watcher.AckWatchRequest;
+import org.elasticsearch.client.watcher.AckWatchResponse;
+import org.elasticsearch.client.watcher.ActionStatus;
+import org.elasticsearch.client.watcher.ActionStatus.AckStatus;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
@@ -25,6 +30,7 @@ import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
+import org.elasticsearch.rest.RestStatus;
import static org.hamcrest.Matchers.is;
@@ -72,4 +78,34 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
}
}
+ public void testAckWatch() throws Exception {
+ String watchId = randomAlphaOfLength(10);
+ String actionId = "logme";
+
+ PutWatchResponse putWatchResponse = createWatch(watchId);
+ assertThat(putWatchResponse.isCreated(), is(true));
+
+ AckWatchResponse response = highLevelClient().watcher().ackWatch(
+ new AckWatchRequest(watchId, actionId), RequestOptions.DEFAULT);
+
+ ActionStatus actionStatus = response.getStatus().actionStatus(actionId);
+ assertEquals(AckStatus.State.AWAITS_SUCCESSFUL_EXECUTION, actionStatus.ackStatus().state());
+
+ // TODO: use the high-level REST client here once it supports 'execute watch'.
+ Request executeWatchRequest = new Request("POST", "_xpack/watcher/watch/" + watchId + "/_execute");
+ executeWatchRequest.setJsonEntity("{ \"record_execution\": true }");
+ Response executeResponse = client().performRequest(executeWatchRequest);
+ assertEquals(RestStatus.OK.getStatus(), executeResponse.getStatusLine().getStatusCode());
+
+ response = highLevelClient().watcher().ackWatch(
+ new AckWatchRequest(watchId, actionId), RequestOptions.DEFAULT);
+
+ actionStatus = response.getStatus().actionStatus(actionId);
+ assertEquals(AckStatus.State.ACKED, actionStatus.ackStatus().state());
+
+ ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+ () -> highLevelClient().watcher().ackWatch(
+ new AckWatchRequest("nonexistent"), RequestOptions.DEFAULT));
+ assertEquals(RestStatus.NOT_FOUND, exception.status());
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java
index cf5af1dd594..d6227e93941 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/WatcherRequestConvertersTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.client;
import org.apache.http.client.methods.HttpDelete;
import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
@@ -30,6 +31,7 @@ import org.elasticsearch.test.ESTestCase;
import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import java.util.Map;
+import java.util.StringJoiner;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;
@@ -75,4 +77,24 @@ public class WatcherRequestConvertersTests extends ESTestCase {
assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint());
assertThat(request.getEntity(), nullValue());
}
+
+ public void testAckWatch() {
+ String watchId = randomAlphaOfLength(10);
+ String[] actionIds = generateRandomStringArray(5, 10, false, true);
+
+ AckWatchRequest ackWatchRequest = new AckWatchRequest(watchId, actionIds);
+ Request request = WatcherRequestConverters.ackWatch(ackWatchRequest);
+
+ assertEquals(HttpPut.METHOD_NAME, request.getMethod());
+
+ StringJoiner expectedEndpoint = new StringJoiner("/", "/", "")
+ .add("_xpack").add("watcher").add("watch").add(watchId).add("_ack");
+ if (ackWatchRequest.getActionIds().length > 0) {
+ String actionsParam = String.join(",", ackWatchRequest.getActionIds());
+ expectedEndpoint.add(actionsParam);
+ }
+
+ assertEquals(expectedEndpoint.toString(), request.getEndpoint());
+ assertThat(request.getEntity(), nullValue());
+ }
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java
index 707997d1f31..48052f86a00 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/WatcherDocumentationIT.java
@@ -21,8 +21,15 @@ package org.elasticsearch.client.documentation;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.Request;
import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.watcher.AckWatchRequest;
+import org.elasticsearch.client.watcher.AckWatchResponse;
+import org.elasticsearch.client.watcher.ActionStatus;
+import org.elasticsearch.client.watcher.ActionStatus.AckStatus;
+import org.elasticsearch.client.watcher.WatchStatus;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
@@ -30,6 +37,7 @@ import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
+import org.elasticsearch.rest.RestStatus;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
@@ -132,4 +140,67 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
}
}
+ public void testAckWatch() throws Exception {
+ RestHighLevelClient client = highLevelClient();
+
+ {
+ BytesReference watch = new BytesArray("{ \n" +
+ " \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
+ " \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" +
+ " \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
+ "}");
+ PutWatchRequest putWatchRequest = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
+ client.watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT);
+
+ // TODO: use the high-level REST client here once it supports 'execute watch'.
+ Request executeWatchRequest = new Request("POST", "_xpack/watcher/watch/my_watch_id/_execute");
+ executeWatchRequest.setJsonEntity("{ \"record_execution\": true }");
+ Response executeResponse = client().performRequest(executeWatchRequest);
+ assertEquals(RestStatus.OK.getStatus(), executeResponse.getStatusLine().getStatusCode());
+ }
+
+ {
+ //tag::ack-watch-execute
+ AckWatchRequest request = new AckWatchRequest("my_watch_id", // <1>
+ "logme", "emailme"); // <2>
+ AckWatchResponse response = client.watcher().ackWatch(request, RequestOptions.DEFAULT);
+ //end::ack-watch-execute
+
+ //tag::ack-watch-response
+ WatchStatus watchStatus = response.getStatus();
+ ActionStatus actionStatus = watchStatus.actionStatus("logme"); // <1>
+ AckStatus.State ackState = actionStatus.ackStatus().state(); // <2>
+ //end::ack-watch-response
+
+ assertEquals(AckStatus.State.ACKED, ackState);
+ }
+
+ {
+ AckWatchRequest request = new AckWatchRequest("my_watch_id");
+ // tag::ack-watch-execute-listener
+            ActionListener<AckWatchResponse> listener = new ActionListener<AckWatchResponse>() {
+ @Override
+ public void onResponse(AckWatchResponse response) {
+ // <1>
+ }
+
+ @Override
+ public void onFailure(Exception e) {
+ // <2>
+ }
+ };
+ // end::ack-watch-execute-listener
+
+ // For testing, replace the empty listener by a blocking listener.
+ final CountDownLatch latch = new CountDownLatch(1);
+ listener = new LatchedActionListener<>(listener, latch);
+
+ // tag::ack-watch-execute-async
+ client.watcher().ackWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
+ // end::ack-watch-execute-async
+
+ assertTrue(latch.await(30L, TimeUnit.SECONDS));
+ }
+ }
+
}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/AckWatchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/AckWatchResponseTests.java
new file mode 100644
index 00000000000..6d6d14e13ed
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/AckWatchResponseTests.java
@@ -0,0 +1,105 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.watcher;
+
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.common.xcontent.XContentParseException;
+import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.XContentTestUtils;
+
+import java.io.IOException;
+import java.util.function.Predicate;
+
+/**
+ * Basic unit tests for {@link AckWatchResponse}.
+ *
+ * Note that we only sanity check watch status parsing here, as there
+ * are dedicated tests for it in {@link WatchStatusTests}.
+ */
+public class AckWatchResponseTests extends ESTestCase {
+
+ public void testBasicParsing() throws IOException {
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
+ .startObject("status")
+ .field("version", 42)
+ .field("execution_state", ExecutionState.ACKNOWLEDGED)
+ .endObject()
+ .endObject();
+ BytesReference bytes = BytesReference.bytes(builder);
+
+ AckWatchResponse response = parse(builder.contentType(), bytes);
+ WatchStatus status = response.getStatus();
+ assertNotNull(status);
+ assertEquals(42, status.version());
+ assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
+ }
+
+ public void testParsingWithMissingStatus() throws IOException {
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject().endObject();
+ BytesReference bytes = BytesReference.bytes(builder);
+
+ expectThrows(IllegalArgumentException.class, () -> parse(builder.contentType(), bytes));
+ }
+
+ public void testParsingWithNullStatus() throws IOException {
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
+ .nullField("status")
+ .endObject();
+ BytesReference bytes = BytesReference.bytes(builder);
+
+ expectThrows(XContentParseException.class, () -> parse(builder.contentType(), bytes));
+ }
+
+ public void testParsingWithUnknownKeys() throws IOException {
+ XContentType contentType = randomFrom(XContentType.values());
+ XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
+ .startObject("status")
+ .field("version", 42)
+ .field("execution_state", ExecutionState.ACKNOWLEDGED)
+ .endObject()
+ .endObject();
+ BytesReference bytes = BytesReference.bytes(builder);
+
+        Predicate<String> excludeFilter = field -> field.equals("status.actions");
+ BytesReference bytesWithRandomFields = XContentTestUtils.insertRandomFields(
+ builder.contentType(), bytes, excludeFilter, random());
+
+ AckWatchResponse response = parse(builder.contentType(), bytesWithRandomFields);
+ WatchStatus status = response.getStatus();
+ assertNotNull(status);
+ assertEquals(42, status.version());
+ assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
+ }
+
+ private AckWatchResponse parse(XContentType contentType, BytesReference bytes) throws IOException {
+ XContentParser parser = XContentFactory.xContent(contentType)
+ .createParser(NamedXContentRegistry.EMPTY, null, bytes.streamInput());
+ parser.nextToken();
+ return AckWatchResponse.fromXContent(parser);
+ }
+}
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatchRequestValidationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatchRequestValidationTests.java
new file mode 100644
index 00000000000..d75e36f7a36
--- /dev/null
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/watcher/WatchRequestValidationTests.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.client.watcher;
+
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.client.ValidationException;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
+import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.Optional;
+
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+
+public class WatchRequestValidationTests extends ESTestCase {
+
+ public void testAcknowledgeWatchInvalidWatchId() {
+ ValidationException e = expectThrows(ValidationException.class,
+ () -> new AckWatchRequest("id with whitespaces"));
+ assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
+ }
+
+ public void testAcknowledgeWatchInvalidActionId() {
+ ValidationException e = expectThrows(ValidationException.class,
+ () -> new AckWatchRequest("_id", "action id with whitespaces"));
+ assertThat(e.validationErrors(), hasItem("action id [action id with whitespaces] contains whitespace"));
+ }
+
+ public void testAcknowledgeWatchNullActionArray() {
+        // an explicitly typed null array avoids ambiguous varargs resolution on some
+        // compilers (e.g. javac 1.8.0_91)
+        String[] nullArray = null;
+        Optional<ValidationException> e = new AckWatchRequest("_id", nullArray).validate();
+ assertFalse(e.isPresent());
+ }
+
+ public void testAcknowledgeWatchNullActionId() {
+ ValidationException e = expectThrows(ValidationException.class,
+ () -> new AckWatchRequest("_id", new String[] {null}));
+ assertThat(e.validationErrors(), hasItem("action id may not be null"));
+ }
+
+ public void testDeleteWatchInvalidWatchId() {
+ ActionRequestValidationException e = new DeleteWatchRequest("id with whitespaces").validate();
+ assertThat(e, is(notNullValue()));
+ assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
+ }
+
+ public void testDeleteWatchNullId() {
+ ActionRequestValidationException e = new DeleteWatchRequest(null).validate();
+ assertThat(e, is(notNullValue()));
+ assertThat(e.validationErrors(), hasItem("watch id is missing"));
+ }
+
+ public void testPutWatchInvalidWatchId() {
+ ActionRequestValidationException e = new PutWatchRequest("id with whitespaces", BytesArray.EMPTY, XContentType.JSON).validate();
+ assertThat(e, is(notNullValue()));
+ assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
+ }
+
+ public void testPutWatchNullId() {
+ ActionRequestValidationException e = new PutWatchRequest(null, BytesArray.EMPTY, XContentType.JSON).validate();
+ assertThat(e, is(notNullValue()));
+ assertThat(e.validationErrors(), hasItem("watch id is missing"));
+ }
+
+ public void testPutWatchSourceNull() {
+ ActionRequestValidationException e = new PutWatchRequest("foo", null, XContentType.JSON).validate();
+ assertThat(e, is(notNullValue()));
+ assertThat(e.validationErrors(), hasItem("watch source is missing"));
+ }
+
+ public void testPutWatchContentNull() {
+ ActionRequestValidationException e = new PutWatchRequest("foo", BytesArray.EMPTY, null).validate();
+ assertThat(e, is(notNullValue()));
+ assertThat(e.validationErrors(), hasItem("request body is missing"));
+ }
+}
diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle
index 52334b72a76..1eed8b41c0e 100644
--- a/distribution/bwc/build.gradle
+++ b/distribution/bwc/build.gradle
@@ -157,8 +157,10 @@ subprojects {
environment('JAVA_HOME', getJavaHome(it, 8))
} else if ("6.2".equals(bwcBranch)) {
environment('JAVA_HOME', getJavaHome(it, 9))
- } else if (["6.3", "6.4", "6.x"].contains(bwcBranch)) {
+ } else if (["6.3", "6.4"].contains(bwcBranch)) {
environment('JAVA_HOME', getJavaHome(it, 10))
+ } else if (["6.x"].contains(bwcBranch)) {
+ environment('JAVA_HOME', getJavaHome(it, 11))
} else {
environment('JAVA_HOME', project.compilerJavaHome)
}
diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc
index cfac4a3c293..bb326cbb9c6 100644
--- a/docs/java-rest/high-level/supported-apis.asciidoc
+++ b/docs/java-rest/high-level/supported-apis.asciidoc
@@ -310,9 +310,11 @@ The Java High Level REST Client supports the following Watcher APIs:
* <<java-rest-high-x-pack-watcher-put-watch>>
* <<java-rest-high-x-pack-watcher-delete-watch>>
+* <<java-rest-high-watcher-ack-watch>>
include::watcher/put-watch.asciidoc[]
include::watcher/delete-watch.asciidoc[]
+include::watcher/ack-watch.asciidoc[]
== Graph APIs
diff --git a/docs/java-rest/high-level/watcher/ack-watch.asciidoc b/docs/java-rest/high-level/watcher/ack-watch.asciidoc
new file mode 100644
index 00000000000..13b62ba3be8
--- /dev/null
+++ b/docs/java-rest/high-level/watcher/ack-watch.asciidoc
@@ -0,0 +1,57 @@
+[[java-rest-high-watcher-ack-watch]]
+=== Ack Watch API
+
+[[java-rest-high-watcher-ack-watch-execution]]
+==== Execution
+
+{xpack-ref}/actions.html#actions-ack-throttle[Acknowledging a watch] enables you
+to manually throttle execution of a watch's actions. A watch can be acknowledged
+through the following request:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute]
+--------------------------------------------------
+<1> The ID of the watch to ack.
+<2> An optional list of IDs representing the watch actions that should be acked.
+If no action IDs are provided, then all of the watch's actions will be acked.
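+
+If the watch does not exist, the request fails with an `ElasticsearchStatusException`
+whose status is `404 NOT FOUND`.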
+
+[[java-rest-high-watcher-ack-watch-response]]
+==== Response
+
+The returned `AckWatchResponse` contains the new status of the requested watch:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-response]
+--------------------------------------------------
+<1> The status of a specific action that was acked.
+<2> The acknowledgement state of the action. If the action was successfully
+acked, this state will be equal to `AckStatus.State.ACKED`.
+
+[[java-rest-high-watcher-ack-watch-async]]
+==== Asynchronous Execution
+
+This request can be executed asynchronously:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-async]
+--------------------------------------------------
+<1> The `AckWatchRequest` to execute and the `ActionListener` to use when
+the execution completes.
+
+The asynchronous method does not block and returns immediately. Once the request
+completes, the `ActionListener` is called back using the `onResponse` method
+if the execution successfully completed or using the `onFailure` method if
+it failed.
+
+A listener for `AckWatchResponse` can be constructed as follows:
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-listener]
+--------------------------------------------------
+<1> Called when the execution is successfully completed. The response is
+provided as an argument.
+<2> Called in case of failure. The raised exception is provided as an argument.
diff --git a/docs/plugins/discovery-gce.asciidoc b/docs/plugins/discovery-gce.asciidoc
index 33369eaba3c..633b136bc71 100644
--- a/docs/plugins/discovery-gce.asciidoc
+++ b/docs/plugins/discovery-gce.asciidoc
@@ -26,12 +26,17 @@ The following gce settings (prefixed with `cloud.gce`) are supported:
`project_id`::
- Your Google project id (mandatory).
+ Your Google project id.
+ By default the project id will be derived from the instance metadata.
+
+ Note: Deriving the project id from system properties or environment variables
+ (`GOOGLE_CLOUD_PROJECT` or `GCLOUD_PROJECT`) is not supported.
`zone`::
- helps to retrieve instances running in a given zone (mandatory). It should be one of the
- https://developers.google.com/compute/docs/zones#available[GCE supported zones].
+ helps to retrieve instances running in a given zone.
+ It should be one of the https://developers.google.com/compute/docs/zones#available[GCE supported zones].
+ By default the zone will be derived from the instance metadata.
See also <<discovery-gce-usage-zones>>.
`retry`::
diff --git a/docs/reference/migration/migrate_7_0/aggregations.asciidoc b/docs/reference/migration/migrate_7_0/aggregations.asciidoc
index b29f741dd85..2b8c2ed9cb7 100644
--- a/docs/reference/migration/migrate_7_0/aggregations.asciidoc
+++ b/docs/reference/migration/migrate_7_0/aggregations.asciidoc
@@ -26,3 +26,9 @@ has been removed. `missing_bucket` should be used instead.
The object used to share aggregation state between the scripts in a Scripted Metric
Aggregation is now a variable called `state` available in the script context, rather than
being provided via the `params` object as `params._agg`.
+
+[float]
+==== Make metric aggregation script parameters `reduce_script` and `combine_script` mandatory
+
+The metric aggregation has been changed to require these two script parameters to ensure users are
+explicitly defining how their data is processed.
diff --git a/docs/reference/migration/migrate_7_0/scripting.asciidoc b/docs/reference/migration/migrate_7_0/scripting.asciidoc
index de312c1c723..01d8805c896 100644
--- a/docs/reference/migration/migrate_7_0/scripting.asciidoc
+++ b/docs/reference/migration/migrate_7_0/scripting.asciidoc
@@ -14,6 +14,15 @@ now been removed. Instead, use `.value` on `date` fields, or explicitly
parse `long` fields into a date object using
`Instance.ofEpochMillis(doc["myfield"].value)`.
+[float]
+==== Accessing missing document values will throw an error
+`doc['field'].value` will throw an exception if
+the document is missing a value for the field `field`.
+
+To check if a document is missing a value, you can use
+`doc['field'].size() == 0`.
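+
+For example, a script can fall back to a default value instead of failing:
+
+[source,painless]
+--------------------------------------------------
+doc['myfield'].size() == 0 ? 0 : doc['myfield'].value
+--------------------------------------------------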
+
+
[float]
==== Script errors will return as `400` error codes
diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle
index fa8005dfa47..bccb496859a 100644
--- a/plugins/discovery-gce/build.gradle
+++ b/plugins/discovery-gce/build.gradle
@@ -26,6 +26,11 @@ dependencyLicenses {
mapping from: /google-.*/, to: 'google'
}
+check {
+ // also execute the QA tests when testing the plugin
+ dependsOn 'qa:gce:check'
+}
+
test {
// this is needed for insecure plugins, remove if possible!
systemProperty 'tests.artifact', project.name
diff --git a/plugins/discovery-gce/qa/build.gradle b/plugins/discovery-gce/qa/build.gradle
new file mode 100644
index 00000000000..f6a418ae4bd
--- /dev/null
+++ b/plugins/discovery-gce/qa/build.gradle
@@ -0,0 +1 @@
+group = "${group}.plugins.discovery-gce.qa"
diff --git a/plugins/discovery-gce/qa/gce/build.gradle b/plugins/discovery-gce/qa/gce/build.gradle
new file mode 100644
index 00000000000..9b496207c56
--- /dev/null
+++ b/plugins/discovery-gce/qa/gce/build.gradle
@@ -0,0 +1,80 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+import org.elasticsearch.gradle.MavenFilteringHack
+import org.elasticsearch.gradle.test.AntFixture
+
+apply plugin: 'elasticsearch.standalone-rest-test'
+apply plugin: 'elasticsearch.rest-test'
+
+final int gceNumberOfNodes = 3
+File gceDiscoveryFile = new File(project.buildDir, 'generated-resources/nodes.uri')
+
+dependencies {
+ testCompile project(path: ':plugins:discovery-gce', configuration: 'runtime')
+}
+
+/** A task to start the GCEFixture which emulates a GCE service **/
+task gceFixture(type: AntFixture) {
+ dependsOn compileTestJava
+ env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
+ executable = new File(project.runtimeJavaHome, 'bin/java')
+ args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, gceDiscoveryFile.getAbsolutePath()
+}
+
+Map expansions = [
+ 'expected_nodes': gceNumberOfNodes
+]
+
+processTestResources {
+ inputs.properties(expansions)
+ MavenFilteringHack.filter(it, expansions)
+}
+
+integTestCluster {
+ dependsOn gceFixture
+ numNodes = gceNumberOfNodes
+ plugin ':plugins:discovery-gce'
+ setting 'discovery.zen.hosts_provider', 'gce'
+
+ // use gce fixture for Auth calls instead of http://metadata.google.internal
+ integTestCluster.environment 'GCE_METADATA_HOST', "http://${-> gceFixture.addressAndPort}"
+
+  // allows configuring the hidden settings (`cloud.gce.host` and `cloud.gce.root_url`)
+ systemProperty 'es.allow_reroute_gce_settings', 'true'
+
+ // use gce fixture for metadata server calls instead of http://metadata.google.internal
+ setting 'cloud.gce.host', "http://${-> gceFixture.addressAndPort}"
+ // use gce fixture for API calls instead of https://www.googleapis.com
+ setting 'cloud.gce.root_url', "http://${-> gceFixture.addressAndPort}"
+
+ unicastTransportUri = { seedNode, node, ant -> return null }
+
+ waitCondition = { node, ant ->
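+    // write each node's transport URI to the discovery file; GCEFixture reads this
+    // file and serves the addresses back as the network IPs of fake GCE instances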
+ gceDiscoveryFile.parentFile.mkdirs()
+ gceDiscoveryFile.setText(integTest.nodes.collect { n -> "${n.transportUri()}" }.join('\n'), 'UTF-8')
+
+ File tmpFile = new File(node.cwd, 'wait.success')
+ ant.get(src: "http://${node.httpUri()}/",
+ dest: tmpFile.toString(),
+ ignoreerrors: true,
+ retries: 10)
+ return tmpFile.exists()
+ }
+}
diff --git a/plugins/discovery-gce/qa/gce/src/test/java/org/elasticsearch/cloud/gce/GCEDiscoveryClientYamlTestSuiteIT.java b/plugins/discovery-gce/qa/gce/src/test/java/org/elasticsearch/cloud/gce/GCEDiscoveryClientYamlTestSuiteIT.java
new file mode 100644
index 00000000000..6c1ca9c72d3
--- /dev/null
+++ b/plugins/discovery-gce/qa/gce/src/test/java/org/elasticsearch/cloud/gce/GCEDiscoveryClientYamlTestSuiteIT.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.cloud.gce;
+
+import com.carrotsearch.randomizedtesting.annotations.Name;
+import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
+import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
+import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
+
+public class GCEDiscoveryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
+
+ public GCEDiscoveryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
+ super(testCandidate);
+ }
+
+ @ParametersFactory
+    public static Iterable<Object[]> parameters() throws Exception {
+ return ESClientYamlSuiteTestCase.createParameters();
+ }
+}
diff --git a/plugins/discovery-gce/qa/gce/src/test/java/org/elasticsearch/cloud/gce/GCEFixture.java b/plugins/discovery-gce/qa/gce/src/test/java/org/elasticsearch/cloud/gce/GCEFixture.java
new file mode 100644
index 00000000000..f52e613ee29
--- /dev/null
+++ b/plugins/discovery-gce/qa/gce/src/test/java/org/elasticsearch/cloud/gce/GCEFixture.java
@@ -0,0 +1,214 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.cloud.gce;
+
+import org.apache.http.client.methods.HttpGet;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.SuppressForbidden;
+import org.elasticsearch.common.collect.MapBuilder;
+import org.elasticsearch.common.path.PathTrie;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.rest.RestUtils;
+import org.elasticsearch.test.fixture.AbstractHttpFixture;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.TimeUnit;
+import java.util.function.Consumer;
+import java.util.function.Function;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
+
+/**
+ * {@link GCEFixture} is a fixture that emulates a GCE service.
+ */
+public class GCEFixture extends AbstractHttpFixture {
+
+ public static final String PROJECT_ID = "discovery-gce-test";
+ public static final String ZONE = "test-zone";
+ public static final String TOKEN = "1/fFAGRNJru1FTz70BzhT3Zg";
+ public static final String TOKEN_TYPE = "Bearer";
+
+    private final PathTrie<RequestHandler> handlers;
+
+ private final Path nodes;
+
+ private GCEFixture(final String workingDir, final String nodesUriPath) {
+ super(workingDir);
+ this.nodes = toPath(Objects.requireNonNull(nodesUriPath));
+ this.handlers = defaultHandlers();
+ }
+
+ public static void main(String[] args) throws Exception {
+ if (args == null || args.length != 2) {
+            throw new IllegalArgumentException("GCEFixture <working directory> <nodes transport uri file>");
+ }
+
+ final GCEFixture fixture = new GCEFixture(args[0], args[1]);
+ fixture.listen();
+ }
+
+ private static String nonAuthPath(Request request) {
+ return nonAuthPath(request.getMethod(), request.getPath());
+ }
+
+ private static String nonAuthPath(String method, String path) {
+ return "NONAUTH " + method + " " + path;
+ }
+
+ private static String authPath(Request request) {
+ return authPath(request.getMethod(), request.getPath());
+ }
+
+ private static String authPath(String method, String path) {
+ return "AUTH " + method + " " + path;
+ }
+
+ /** Builds the default request handlers **/
+    private PathTrie<RequestHandler> defaultHandlers() {
+        final PathTrie<RequestHandler> handlers = new PathTrie<>(RestUtils.REST_DECODER);
+
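+        // every response carries "Metadata-Flavor: Google", mirroring the real metadata
+        // server so that clients recognize the values as authentic metadata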
+        final Consumer<Map<String, String>> commonHeaderConsumer = headers -> headers.put("Metadata-Flavor", "Google");
+
+    final Function<String, Response> simpleValue = value -> {
+        final Map<String, String> headers = new HashMap<>(TEXT_PLAIN_CONTENT_TYPE);
+ commonHeaderConsumer.accept(headers);
+
+ final byte[] responseAsBytes = value.getBytes(StandardCharsets.UTF_8);
+ return new Response(RestStatus.OK.getStatus(), headers, responseAsBytes);
+ };
+
+    final Function<String, Response> jsonValue = value -> {
+        final Map<String, String> headers = new HashMap<>(JSON_CONTENT_TYPE);
+ commonHeaderConsumer.accept(headers);
+
+ final byte[] responseAsBytes = value.getBytes(StandardCharsets.UTF_8);
+ return new Response(RestStatus.OK.getStatus(), headers, responseAsBytes);
+ };
+
+ // https://cloud.google.com/compute/docs/storing-retrieving-metadata
+ handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/project/project-id"),
+ request -> simpleValue.apply(PROJECT_ID));
+ handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/project/attributes/google-compute-default-zone"),
+ request -> simpleValue.apply(ZONE));
+ // https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances
+ handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/instance/service-accounts/default/token"),
+ request -> jsonValue.apply(Strings.toString(jsonBuilder()
+ .startObject()
+ .field("access_token", TOKEN)
+ .field("expires_in", TimeUnit.HOURS.toSeconds(1))
+ .field("token_type", TOKEN_TYPE)
+ .endObject())));
+
+ // https://cloud.google.com/compute/docs/reference/rest/v1/instances
+ handlers.insert(authPath(HttpGet.METHOD_NAME, "/compute/v1/projects/{project}/zones/{zone}/instances"),
+ request -> {
+                final List<Map<String, Object>> items = new ArrayList<>();
+ int count = 0;
+ for (String address : Files.readAllLines(nodes)) {
+ count++;
+                items.add(MapBuilder.<String, Object>newMapBuilder()
+ .put("id", Long.toString(9309873766405L + count))
+ .put("description", "ES node" + count)
+ .put("name", "test" + count)
+ .put("kind", "compute#instance")
+ .put("machineType", "n1-standard-1")
+ .put("networkInterfaces",
+                        Collections.singletonList(MapBuilder.<String, Object>newMapBuilder()
+ .put("accessConfigs", Collections.emptyList())
+ .put("name", "nic0")
+ .put("network", "default")
+ .put("networkIP", address)
+ .immutableMap()))
+ .put("status", "RUNNING")
+ .put("zone", ZONE)
+ .immutableMap());
+ }
+
+ final String json = Strings.toString(jsonBuilder()
+ .startObject()
+ .field("id", "test-instances")
+ .field("items", items)
+ .endObject());
+
+ final byte[] responseAsBytes = json.getBytes(StandardCharsets.UTF_8);
+                final Map<String, String> headers = new HashMap<>(JSON_CONTENT_TYPE);
+ commonHeaderConsumer.accept(headers);
+ return new Response(RestStatus.OK.getStatus(), headers, responseAsBytes);
+ });
+ return handlers;
+ }
+
+ @Override
+ protected Response handle(final Request request) throws IOException {
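+        // metadata-style endpoints are served without credentials; compute API
+        // endpoints require the Bearer token issued by the fixture's token endpoint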
+ final String nonAuthorizedPath = nonAuthPath(request);
+ final RequestHandler nonAuthorizedHandler = handlers.retrieve(nonAuthorizedPath, request.getParameters());
+ if (nonAuthorizedHandler != null) {
+ return nonAuthorizedHandler.handle(request);
+ }
+
+ final String authorizedPath = authPath(request);
+ final RequestHandler authorizedHandler = handlers.retrieve(authorizedPath, request.getParameters());
+ if (authorizedHandler != null) {
+ final String authorization = request.getHeader("Authorization");
+ if ((TOKEN_TYPE + " " + TOKEN).equals(authorization) == false) {
+ return newError(RestStatus.UNAUTHORIZED, "Authorization", "Login Required");
+ }
+ return authorizedHandler.handle(request);
+ }
+
+ return null;
+ }
+
+ private static Response newError(final RestStatus status, final String code, final String message) throws IOException {
+ final String response = Strings.toString(jsonBuilder()
+ .startObject()
+ .field("error", MapBuilder.newMapBuilder()
+ .put("errors", Collections.singletonList(
+ MapBuilder.newMapBuilder()
+ .put("domain", "global")
+ .put("reason", "required")
+ .put("message", message)
+ .put("locationType", "header")
+ .put("location", code)
+ .immutableMap()
+ ))
+ .put("code", status.getStatus())
+ .put("message", message)
+ .immutableMap())
+ .endObject());
+
+ return new Response(status.getStatus(), JSON_CONTENT_TYPE, response.getBytes(UTF_8));
+ }
+
+ @SuppressForbidden(reason = "Paths#get is fine - we don't have environment here")
+ private static Path toPath(final String dir) {
+ return Paths.get(dir);
+ }
+}
diff --git a/plugins/discovery-gce/qa/gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml b/plugins/discovery-gce/qa/gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml
new file mode 100644
index 00000000000..562d69a7a38
--- /dev/null
+++ b/plugins/discovery-gce/qa/gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml
@@ -0,0 +1,15 @@
+# Integration tests for discovery-gce
+setup:
+ - do:
+ cluster.health:
+ wait_for_status: green
+ wait_for_nodes: ${expected_nodes}
+
+---
+"All nodes are correctly discovered":
+
+ - do:
+ nodes.info:
+ metric: [ transport ]
+
+ - match: { _nodes.total: ${expected_nodes} }
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java
index d05f15344bc..fc78a8a091b 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesService.java
@@ -75,4 +75,8 @@ public interface GceInstancesService extends Closeable {
* @return a collection of running instances within the same GCE project
*/
Collection<Instance> instances();
+
+ String projectId();
+
+    List<String> zones();
}
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java
index ed0bf07d75c..aab6e0c74ec 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/cloud/gce/GceInstancesServiceImpl.java
@@ -29,6 +29,11 @@ import java.util.function.Function;
import com.google.api.client.googleapis.compute.ComputeCredential;
import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
+import com.google.api.client.http.GenericUrl;
+import com.google.api.client.http.HttpHeaders;
+import com.google.api.client.http.HttpRequest;
+import com.google.api.client.http.HttpRequestFactory;
+import com.google.api.client.http.HttpResponse;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.javanet.NetHttpTransport;
import com.google.api.client.json.JsonFactory;
@@ -103,9 +108,58 @@ public class GceInstancesServiceImpl extends AbstractComponent implements GceIns
public GceInstancesServiceImpl(Settings settings) {
super(settings);
- this.project = PROJECT_SETTING.get(settings);
- this.zones = ZONE_SETTING.get(settings);
this.validateCerts = GCE_VALIDATE_CERTIFICATES.get(settings);
+ this.project = resolveProject();
+ this.zones = resolveZones();
+ }
+
+ private String resolveProject() {
+ if (PROJECT_SETTING.exists(settings)) {
+ return PROJECT_SETTING.get(settings);
+ }
+
+ try {
+ // this code is based on a private GCE method: {@link com.google.cloud.ServiceOptions#getAppEngineProjectIdFromMetadataServer()}
+ return getAppEngineValueFromMetadataServer("/computeMetadata/v1/project/project-id");
+ } catch (Exception e) {
+ logger.warn("unable to resolve project from metadata server for GCE discovery service", e);
+ }
+ return null;
+ }
+
+    private List<String> resolveZones() {
+ if (ZONE_SETTING.exists(settings)) {
+ return ZONE_SETTING.get(settings);
+ }
+
+ try {
+ final String defaultZone =
+ getAppEngineValueFromMetadataServer("/computeMetadata/v1/project/attributes/google-compute-default-zone");
+ return Collections.singletonList(defaultZone);
+ } catch (Exception e) {
+ logger.warn("unable to resolve default zone from metadata server for GCE discovery service", e);
+ }
+ return null;
+ }
+
+ String getAppEngineValueFromMetadataServer(String serviceURL) throws GeneralSecurityException, IOException {
+ String metadata = GceMetadataService.GCE_HOST.get(settings);
+ GenericUrl url = Access.doPrivileged(() -> new GenericUrl(metadata + serviceURL));
+
+ HttpTransport httpTransport = getGceHttpTransport();
+ HttpRequestFactory requestFactory = httpTransport.createRequestFactory();
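+        // the metadata server only answers requests carrying the "Metadata-Flavor: Google"
+        // header; short timeouts keep startup fast when no metadata server is reachable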
+ HttpRequest request = requestFactory.buildGetRequest(url)
+ .setConnectTimeout(500)
+ .setReadTimeout(500)
+ .setHeaders(new HttpHeaders().set("Metadata-Flavor", "Google"));
+ HttpResponse response = Access.doPrivilegedIOException(() -> request.execute());
+ return headerContainsMetadataFlavor(response) ? response.parseAsString() : null;
+ }
+
+ private static boolean headerContainsMetadataFlavor(HttpResponse response) {
+        // based on com.google.cloud.ServiceOptions#headerContainsMetadataFlavor(HttpResponse)
+ String metadataFlavorValue = response.getHeaders().getFirstHeaderStringValue("Metadata-Flavor");
+ return "Google".equals(metadataFlavorValue);
}
protected synchronized HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException {
@@ -180,6 +234,16 @@ public class GceInstancesServiceImpl extends AbstractComponent implements GceIns
return this.client;
}
+ @Override
+ public String projectId() {
+ return project;
+ }
+
+ @Override
+ public List zones() {
+ return zones;
+ }
+
@Override
public void close() throws IOException {
if (gceHttpTransport != null) {
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
index 778c38697c5..36f8aa36b34 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java
@@ -79,8 +79,8 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas
this.networkService = networkService;
this.refreshInterval = GceInstancesService.REFRESH_SETTING.get(settings);
- this.project = GceInstancesService.PROJECT_SETTING.get(settings);
- this.zones = GceInstancesService.ZONE_SETTING.get(settings);
+ this.project = gceInstancesService.projectId();
+ this.zones = gceInstancesService.zones();
this.tags = TAGS_SETTING.get(settings);
if (logger.isDebugEnabled()) {
diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
index 183dfeda884..418e3fffe24 100644
--- a/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
+++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/plugin/discovery/gce/GceDiscoveryPlugin.java
@@ -22,6 +22,7 @@ package org.elasticsearch.plugin.discovery.gce;
import com.google.api.client.http.HttpHeaders;
import com.google.api.client.util.ClassInfo;
import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.Booleans;
import org.elasticsearch.core.internal.io.IOUtils;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.cloud.gce.GceInstancesService;
@@ -41,6 +42,7 @@ import org.elasticsearch.transport.TransportService;
import java.io.Closeable;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
@@ -49,8 +51,12 @@ import java.util.function.Supplier;
public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Closeable {
+    /** Determines whether settings that reroute GCE calls should be allowed (for testing purposes only). */
+ private static final boolean ALLOW_REROUTE_GCE_SETTINGS =
+ Booleans.parseBoolean(System.getProperty("es.allow_reroute_gce_settings", "false"));
+
public static final String GCE = "gce";
- private final Settings settings;
+ protected final Settings settings;
private static final Logger logger = Loggers.getLogger(GceDiscoveryPlugin.class);
// stashed when created in order to properly close
private final SetOnce<GceInstancesService> gceInstancesService = new SetOnce<>();
@@ -94,14 +100,22 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
@Override
public List<Setting<?>> getSettings() {
- return Arrays.asList(
- // Register GCE settings
- GceInstancesService.PROJECT_SETTING,
- GceInstancesService.ZONE_SETTING,
- GceUnicastHostsProvider.TAGS_SETTING,
- GceInstancesService.REFRESH_SETTING,
- GceInstancesService.RETRY_SETTING,
- GceInstancesService.MAX_WAIT_SETTING);
+ List<Setting<?>> settings = new ArrayList<>(
+ Arrays.asList(
+ // Register GCE settings
+ GceInstancesService.PROJECT_SETTING,
+ GceInstancesService.ZONE_SETTING,
+ GceUnicastHostsProvider.TAGS_SETTING,
+ GceInstancesService.REFRESH_SETTING,
+ GceInstancesService.RETRY_SETTING,
+ GceInstancesService.MAX_WAIT_SETTING)
+ );
+
+ if (ALLOW_REROUTE_GCE_SETTINGS) {
+ settings.add(GceMetadataService.GCE_HOST);
+ settings.add(GceInstancesServiceImpl.GCE_ROOT_URL);
+ }
+ return Collections.unmodifiableList(settings);
}
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/cloud/gce/GceInstancesServiceImplTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/cloud/gce/GceInstancesServiceImplTests.java
new file mode 100644
index 00000000000..efb9b6c03d8
--- /dev/null
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/cloud/gce/GceInstancesServiceImplTests.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.elasticsearch.cloud.gce;
+
+import com.google.api.client.http.HttpTransport;
+import com.google.api.client.http.LowLevelHttpRequest;
+import com.google.api.client.http.LowLevelHttpResponse;
+import com.google.api.client.json.Json;
+import com.google.api.client.testing.http.MockHttpTransport;
+import com.google.api.client.testing.http.MockLowLevelHttpRequest;
+import com.google.api.client.testing.http.MockLowLevelHttpResponse;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.test.ESTestCase;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.hamcrest.CoreMatchers.nullValue;
+import static org.hamcrest.core.Is.is;
+
+public class GceInstancesServiceImplTests extends ESTestCase {
+
+ public void testHeaderContainsMetadataFlavor() throws Exception {
+ final AtomicBoolean addMetadataFlavor = new AtomicBoolean();
+ final MockHttpTransport transport = new MockHttpTransport() {
+ @Override
+ public LowLevelHttpRequest buildRequest(String method, final String url) {
+ return new MockLowLevelHttpRequest() {
+ @Override
+ public LowLevelHttpResponse execute() {
+ MockLowLevelHttpResponse response = new MockLowLevelHttpResponse();
+ response.setStatusCode(200);
+ response.setContentType(Json.MEDIA_TYPE);
+ response.setContent("value");
+ if (addMetadataFlavor.get()) {
+ response.addHeader("Metadata-Flavor", "Google");
+ }
+ return response;
+ }
+ };
+ }
+ };
+
+ final GceInstancesServiceImpl service = new GceInstancesServiceImpl(Settings.EMPTY) {
+ @Override
+ protected synchronized HttpTransport getGceHttpTransport() {
+ return transport;
+ }
+ };
+
+ final String serviceURL = "/computeMetadata/v1/project/project-id";
+ assertThat(service.getAppEngineValueFromMetadataServer(serviceURL), is(nullValue()));
+
+ addMetadataFlavor.set(true);
+ assertThat(service.getAppEngineValueFromMetadataServer(serviceURL), is("value"));
+ }
+}
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java
index 3ceacdedcf7..cc5c400ce3b 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoverTests.java
@@ -170,6 +170,16 @@ public class GceDiscoverTests extends ESIntegTestCase {
});
}
+ @Override
+ public String projectId() {
+ return PROJECT_SETTING.get(settings);
+ }
+
+ @Override
+ public List<String> zones() {
+ return ZONE_SETTING.get(settings);
+ }
+
@Override
public void close() throws IOException {
}
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
index 816152186e7..629c2b3f524 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.discovery.gce;
import org.elasticsearch.Version;
import org.elasticsearch.cloud.gce.GceInstancesServiceImpl;
+import org.elasticsearch.cloud.gce.GceMetadataService;
import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
@@ -40,6 +41,7 @@ import java.util.Locale;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize;
+import static org.hamcrest.Matchers.not;
/**
* This test class uses a GCE HTTP Mock system which allows to simulate JSON Responses.
@@ -211,7 +213,10 @@ public class GceDiscoveryTests extends ESTestCase {
}
public void testIllegalSettingsMissingAllRequired() {
- Settings nodeSettings = Settings.EMPTY;
+ Settings nodeSettings = Settings.builder()
+ // to prevent resolution using the default GCE host
+ .put(GceMetadataService.GCE_HOST.getKey(), "http://internal")
+ .build();
mock = new GceInstancesServiceMock(nodeSettings);
try {
buildDynamicNodes(mock, nodeSettings);
@@ -223,6 +228,8 @@ public class GceDiscoveryTests extends ESTestCase {
public void testIllegalSettingsMissingProject() {
Settings nodeSettings = Settings.builder()
+ // to prevent resolution using the default GCE host
+ .put(GceMetadataService.GCE_HOST.getKey(), "http://internal")
.putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
@@ -236,6 +243,8 @@ public class GceDiscoveryTests extends ESTestCase {
public void testIllegalSettingsMissingZone() {
Settings nodeSettings = Settings.builder()
+ // to prevent resolution using the default GCE host
+ .put(GceMetadataService.GCE_HOST.getKey(), "http://internal")
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.build();
mock = new GceInstancesServiceMock(nodeSettings);
@@ -261,4 +270,13 @@ public class GceDiscoveryTests extends ESTestCase {
List<TransportAddress> dynamicHosts = buildDynamicNodes(mock, nodeSettings);
assertThat(dynamicHosts, hasSize(1));
}
+
+ public void testMetadataServerValues() {
+ Settings nodeSettings = Settings.EMPTY;
+ mock = new GceInstancesServiceMock(nodeSettings);
+ assertThat(mock.projectId(), not(projectName));
+
+ List<TransportAddress> dynamicHosts = buildDynamicNodes(mock, nodeSettings);
+ assertThat(dynamicHosts, hasSize(1));
+ }
}
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceInstancesServiceMock.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceInstancesServiceMock.java
index d2612ca75ab..0653391671b 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceInstancesServiceMock.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceInstancesServiceMock.java
@@ -32,11 +32,13 @@ public class GceInstancesServiceMock extends GceInstancesServiceImpl {
public GceInstancesServiceMock(Settings settings) {
super(settings);
- this.mockHttpTransport = GceMockUtils.configureMock();
}
@Override
protected HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException {
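+ // create the mock lazily: the base class may already request the transport
+ // while resolving the project id and zones from the (mocked) metadata server,
+ // before this subclass' constructor has finished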
+ if (this.mockHttpTransport == null) {
+ this.mockHttpTransport = GceMockUtils.configureMock();
+ }
return this.mockHttpTransport;
}
}
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java
index d0e33fcb67c..3a34e3629db 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceMockUtils.java
@@ -39,7 +39,7 @@ import java.net.URL;
public class GceMockUtils {
protected static final Logger logger = Loggers.getLogger(GceMockUtils.class);
- public static final String GCE_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/instance";
+ public static final String GCE_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/";
protected static HttpTransport configureMock() {
return new MockHttpTransport() {
@@ -54,6 +54,7 @@ public class GceMockUtils {
if (url.startsWith(GCE_METADATA_URL)) {
logger.info("--> Simulate GCE Auth/Metadata response for [{}]", url);
response.setContent(readGoogleInternalJsonResponse(url));
+ response.addHeader("Metadata-Flavor", "Google");
} else {
logger.info("--> Simulate GCE API response for [{}]", url);
response.setContent(readGoogleApiJsonResponse(url));
diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/metadataserver/zones/europe-west1-b/instances b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/metadataserver/zones/europe-west1-b/instances
new file mode 100644
index 00000000000..049e0e1e1b1
--- /dev/null
+++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/metadataserver/zones/europe-west1-b/instances
@@ -0,0 +1,36 @@
+{
+ "id": "dummy",
+ "items":[
+ {
+ "description": "ES Node 1",
+ "id": "9309873766428965105",
+ "kind": "compute#instance",
+ "machineType": "n1-standard-1",
+ "name": "test1",
+ "networkInterfaces": [
+ {
+ "accessConfigs": [
+ {
+ "kind": "compute#accessConfig",
+ "name": "External NAT",
+ "natIP": "104.155.13.147",
+ "type": "ONE_TO_ONE_NAT"
+ }
+ ],
+ "name": "nic0",
+ "network": "default",
+ "networkIP": "10.240.79.59"
+ }
+ ],
+ "status": "RUNNING",
+ "tags": {
+ "fingerprint": "xA6QJb-rGtg=",
+ "items": [
+ "elasticsearch",
+ "dev"
+ ]
+ },
+ "zone": "europe-west1-b"
+ }
+ ]
+}
diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/metadataserver/zones/us-central1-a/instances b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/metadataserver/zones/us-central1-a/instances
new file mode 100644
index 00000000000..7e1e5d5d93a
--- /dev/null
+++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/compute/v1/projects/metadataserver/zones/us-central1-a/instances
@@ -0,0 +1,36 @@
+{
+ "id": "dummy",
+ "items":[
+ {
+ "description": "ES Node 2",
+ "id": "9309873766428965105",
+ "kind": "compute#instance",
+ "machineType": "n1-standard-1",
+ "name": "test2",
+ "networkInterfaces": [
+ {
+ "accessConfigs": [
+ {
+ "kind": "compute#accessConfig",
+ "name": "External NAT",
+ "natIP": "104.155.13.147",
+ "type": "ONE_TO_ONE_NAT"
+ }
+ ],
+ "name": "nic0",
+ "network": "default",
+ "networkIP": "10.240.79.59"
+ }
+ ],
+ "status": "RUNNING",
+ "tags": {
+ "fingerprint": "xA6QJb-rGtg=",
+ "items": [
+ "elasticsearch",
+ "dev"
+ ]
+ },
+ "zone": "us-central1-a"
+ }
+ ]
+}
diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/computeMetadata/v1/project/attributes/google-compute-default-zone b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/computeMetadata/v1/project/attributes/google-compute-default-zone
new file mode 100644
index 00000000000..6cf886270be
--- /dev/null
+++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/computeMetadata/v1/project/attributes/google-compute-default-zone
@@ -0,0 +1 @@
+europe-west1-b
\ No newline at end of file
diff --git a/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/computeMetadata/v1/project/project-id b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/computeMetadata/v1/project/project-id
new file mode 100644
index 00000000000..25b80693818
--- /dev/null
+++ b/plugins/discovery-gce/src/test/resources/org/elasticsearch/discovery/gce/computeMetadata/v1/project/project-id
@@ -0,0 +1 @@
+metadataserver
\ No newline at end of file
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
index b4531f9c489..8d9a6887765 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
@@ -186,6 +186,11 @@ public class NumberFieldMapper extends FieldMapper {
return result;
}
+ @Override
+ public Number parsePoint(byte[] value) {
+ return HalfFloatPoint.decodeDimension(value, 0);
+ }
+
@Override
public Float parse(XContentParser parser, boolean coerce) throws IOException {
float parsed = parser.floatValue(coerce);
@@ -278,6 +283,11 @@ public class NumberFieldMapper extends FieldMapper {
return result;
}
+ @Override
+ public Number parsePoint(byte[] value) {
+ return FloatPoint.decodeDimension(value, 0);
+ }
+
@Override
public Float parse(XContentParser parser, boolean coerce) throws IOException {
float parsed = parser.floatValue(coerce);
@@ -359,6 +369,11 @@ public class NumberFieldMapper extends FieldMapper {
return parsed;
}
+ @Override
+ public Number parsePoint(byte[] value) {
+ return DoublePoint.decodeDimension(value, 0);
+ }
+
@Override
public Double parse(XContentParser parser, boolean coerce) throws IOException {
double parsed = parser.doubleValue(coerce);
@@ -451,6 +466,11 @@ public class NumberFieldMapper extends FieldMapper {
return (byte) doubleValue;
}
+ @Override
+ public Number parsePoint(byte[] value) {
+ return INTEGER.parsePoint(value).byteValue();
+ }
+
@Override
public Short parse(XContentParser parser, boolean coerce) throws IOException {
int value = parser.intValue(coerce);
@@ -507,6 +527,11 @@ public class NumberFieldMapper extends FieldMapper {
return (short) doubleValue;
}
+ @Override
+ public Number parsePoint(byte[] value) {
+ return INTEGER.parsePoint(value).shortValue();
+ }
+
@Override
public Short parse(XContentParser parser, boolean coerce) throws IOException {
return parser.shortValue(coerce);
@@ -559,6 +584,11 @@ public class NumberFieldMapper extends FieldMapper {
return (int) doubleValue;
}
+ @Override
+ public Number parsePoint(byte[] value) {
+ return IntPoint.decodeDimension(value, 0);
+ }
+
@Override
public Integer parse(XContentParser parser, boolean coerce) throws IOException {
return parser.intValue(coerce);
@@ -673,6 +703,11 @@ public class NumberFieldMapper extends FieldMapper {
return Numbers.toLong(stringValue, coerce);
}
+ @Override
+ public Number parsePoint(byte[] value) {
+ return LongPoint.decodeDimension(value, 0);
+ }
+
@Override
public Long parse(XContentParser parser, boolean coerce) throws IOException {
return parser.longValue(coerce);
@@ -789,6 +824,7 @@ public class NumberFieldMapper extends FieldMapper {
boolean hasDocValues);
public abstract Number parse(XContentParser parser, boolean coerce) throws IOException;
public abstract Number parse(Object value, boolean coerce);
+ public abstract Number parsePoint(byte[] value);
public abstract List<Field> createFields(String name, Number value, boolean indexed,
boolean docValued, boolean stored);
Number valueForSearch(Number value) {
@@ -937,6 +973,10 @@ public class NumberFieldMapper extends FieldMapper {
}
}
+ public Number parsePoint(byte[] value) {
+ return type.parsePoint(value);
+ }
+
@Override
public boolean equals(Object o) {
if (super.equals(o) == false) {
diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java b/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
index cf8cc4022fd..4cfd5be2afe 100644
--- a/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
+++ b/server/src/main/java/org/elasticsearch/index/mapper/ParseContext.java
@@ -24,6 +24,7 @@ import com.carrotsearch.hppc.ObjectObjectMap;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings;
@@ -32,6 +33,7 @@ import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedList;
import java.util.List;
import java.util.Set;
@@ -454,13 +456,40 @@ public abstract class ParseContext implements Iterable<ParseContext.Document> {
}
void postParse() {
- // reverse the order of docs for nested docs support, parent should be last
if (documents.size() > 1) {
docsReversed = true;
- Collections.reverse(documents);
+ if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_5_0)) {
+ /**
+ * For indices created on or after {@link Version#V_6_5_0} we preserve the order
+ * of the children while ensuring that parents appear after them.
+ */
+ List<Document> newDocs = reorderParent(documents);
+ documents.clear();
+ documents.addAll(newDocs);
+ } else {
+ // reverse the order of docs for nested docs support, parent should be last
+ Collections.reverse(documents);
+ }
}
}
+ /**
+ * Returns a copy of the provided {@link List} where parent documents appear
+ * after their children.
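+ * For instance, docs parsed in the order [root, child1, child2] are
+ * returned as [child1, child2, root], preserving the order of the children.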
+ */
+ private List<Document> reorderParent(List<Document> docs) {
+ List<Document> newDocs = new ArrayList<>(docs.size());
+ LinkedList<Document> parents = new LinkedList<>();
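+ // docs are in parse order, each parent created before its children; keep the
+ // chain of open ancestors in a stack and flush a completed subtree whenever
+ // the head of the stack is no longer the current doc's parent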
+ for (Document doc : docs) {
+ while (parents.peek() != doc.getParent()) {
+ newDocs.add(parents.poll());
+ }
+ parents.add(0, doc);
+ }
+ newDocs.addAll(parents);
+ return newDocs;
+ }
+
@Override
public Iterator<Document> iterator() {
return documents.iterator();
diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java
index a0095613cdb..da80a07abc1 100644
--- a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java
+++ b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java
@@ -21,6 +21,9 @@ package org.elasticsearch.indices;
import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.ObjectSet;
+
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.Accountable;
@@ -75,6 +78,8 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
public static final Setting<TimeValue> INDICES_CACHE_QUERY_EXPIRE =
Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), Property.NodeScope);
+ private static final Logger LOGGER = LogManager.getLogger(IndicesRequestCache.class);
+
private final ConcurrentMap<CleanupKey, Boolean> registeredClosedListeners = ConcurrentCollections.newConcurrentMap();
private final Set<CleanupKey> keysToClean = ConcurrentCollections.newConcurrentSet();
private final ByteSizeValue size;
@@ -109,13 +114,19 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
notification.getKey().entity.onRemoval(notification);
}
+ // NORELEASE The cacheKeyRenderer has been added in order to debug
+ // https://github.com/elastic/elasticsearch/issues/32827, it should be
+ // removed when this issue is solved
BytesReference getOrCompute(CacheEntity cacheEntity, Supplier<BytesReference> loader,
- DirectoryReader reader, BytesReference cacheKey) throws Exception {
+ DirectoryReader reader, BytesReference cacheKey, Supplier<String> cacheKeyRenderer) throws Exception {
final Key key = new Key(cacheEntity, reader.getVersion(), cacheKey);
Loader cacheLoader = new Loader(cacheEntity, loader);
BytesReference value = cache.computeIfAbsent(key, cacheLoader);
if (cacheLoader.isLoaded()) {
key.entity.onMiss();
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("Cache miss for reader version [{}] and request:\n {}", reader.getVersion(), cacheKeyRenderer.get());
+ }
// see if its the first time we see this reader, and make sure to register a cleanup key
CleanupKey cleanupKey = new CleanupKey(cacheEntity, reader.getVersion());
if (!registeredClosedListeners.containsKey(cleanupKey)) {
@@ -126,6 +137,9 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
}
} else {
key.entity.onHit();
+ if (LOGGER.isTraceEnabled()) {
+ LOGGER.trace("Cache hit for reader version [{}] and request:\n {}", reader.getVersion(), cacheKeyRenderer.get());
+ }
}
return value;
}
diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java
index e9f674e14a5..05dcf0b2d01 100644
--- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java
+++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java
@@ -1191,7 +1191,9 @@ public class IndicesService extends AbstractLifecycleComponent
final DirectoryReader directoryReader = context.searcher().getDirectoryReader();
boolean[] loadedFromCache = new boolean[] { true };
- BytesReference bytesReference = cacheShardLevelResult(context.indexShard(), directoryReader, request.cacheKey(), out -> {
+ BytesReference bytesReference = cacheShardLevelResult(context.indexShard(), directoryReader, request.cacheKey(), () -> {
+ return "Shard: " + request.shardId() + "\nSource:\n" + request.source();
+ }, out -> {
queryPhase.execute(context);
try {
context.queryResult().writeToNoId(out);
@@ -1217,6 +1219,10 @@ public class IndicesService extends AbstractLifecycleComponent
// running a search that times out concurrently will likely timeout again if it's run while we have this `stale` result in the
// cache. One other option is to not cache requests with a timeout at all...
indicesRequestCache.invalidate(new IndexShardCacheEntity(context.indexShard()), directoryReader, request.cacheKey());
+ if (logger.isTraceEnabled()) {
+ logger.trace("Query timed out, invalidating cache entry for request on shard [{}]:\n {}", request.shardId(),
+ request.source());
+ }
}
}
@@ -1232,8 +1238,8 @@ public class IndicesService extends AbstractLifecycleComponent
* @param loader loads the data into the cache if needed
* @return the contents of the cache or the result of calling the loader
*/
- private BytesReference cacheShardLevelResult(IndexShard shard, DirectoryReader reader, BytesReference cacheKey, Consumer<StreamOutput> loader)
- throws Exception {
+ private BytesReference cacheShardLevelResult(IndexShard shard, DirectoryReader reader, BytesReference cacheKey,
+ Supplier<String> cacheKeyRenderer, Consumer<StreamOutput> loader) throws Exception {
IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard);
Supplier<BytesReference> supplier = () -> {
/* BytesStreamOutput allows to pass the expected size but by default uses
@@ -1251,7 +1257,7 @@ public class IndicesService extends AbstractLifecycleComponent
return out.bytes();
}
};
- return indicesRequestCache.getOrCompute(cacheEntity, supplier, reader, cacheKey);
+ return indicesRequestCache.getOrCompute(cacheEntity, supplier, reader, cacheKey, cacheKeyRenderer);
}
static final class IndexShardCacheEntity extends AbstractIndexShardCacheEntity {
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java
index c65277d389c..02083177099 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregator.java
@@ -18,7 +18,12 @@
*/
package org.elasticsearch.search.aggregations.metrics;
+import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.search.CollectionTerminatedException;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FutureArrays;
import org.apache.lucene.search.ScoreMode;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
@@ -33,30 +38,45 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
+import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
+import java.util.function.Function;
+
+import static org.elasticsearch.search.aggregations.metrics.MinAggregator.getPointReaderOrNull;
class MaxAggregator extends NumericMetricsAggregator.SingleValue {
final ValuesSource.Numeric valuesSource;
final DocValueFormat formatter;
+ final String pointField;
+ final Function<byte[], Number> pointConverter;
+
DoubleArray maxes;
- MaxAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
- SearchContext context,
- Aggregator parent, List<PipelineAggregator> pipelineAggregators,
- Map<String, Object> metaData) throws IOException {
+ MaxAggregator(String name,
+ ValuesSourceConfig<ValuesSource.Numeric> config,
+ ValuesSource.Numeric valuesSource,
+ SearchContext context,
+ Aggregator parent, List<PipelineAggregator> pipelineAggregators,
+ Map<String, Object> metaData) throws IOException {
super(name, context, parent, pipelineAggregators, metaData);
this.valuesSource = valuesSource;
- this.formatter = formatter;
if (valuesSource != null) {
maxes = context.bigArrays().newDoubleArray(1, false);
maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY);
}
+ this.formatter = config.format();
+ this.pointConverter = getPointReaderOrNull(context, parent, config);
+ if (pointConverter != null) {
+ pointField = config.fieldContext().field();
+ } else {
+ pointField = null;
+ }
}
@Override
@@ -68,8 +88,28 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue {
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
- return LeafBucketCollector.NO_OP_COLLECTOR;
- }
+ if (parent != null) {
+ return LeafBucketCollector.NO_OP_COLLECTOR;
+ } else {
+ // we have no parent and the values source is empty so we can skip collecting hits.
+ throw new CollectionTerminatedException();
+ }
+ }
+ if (pointConverter != null) {
+ Number segMax = findLeafMaxValue(ctx.reader(), pointField, pointConverter);
+ if (segMax != null) {
+ /**
+ * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull})
+ * so the ordinal for the bucket is always 0.
+ */
+ assert maxes.size() == 1;
+ double max = maxes.get(0);
+ max = Math.max(max, segMax.doubleValue());
+ maxes.set(0, max);
+ // the maximum value has been extracted, we don't need to collect hits on this segment.
+ throw new CollectionTerminatedException();
+ }
+ }
final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx);
final NumericDoubleValues values = MultiValueMode.MAX.select(allValues);
@@ -118,4 +158,48 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue {
public void doClose() {
Releasables.close(maxes);
}
+
+ /**
+ * Returns the maximum value indexed in the <code>fieldName</code> field or <code>null</code>
+ * if the value cannot be inferred from the indexed {@link PointValues}.
+ */
+ static Number findLeafMaxValue(LeafReader reader, String fieldName, Function<byte[], Number> converter) throws IOException {
+ final PointValues pointValues = reader.getPointValues(fieldName);
+ if (pointValues == null) {
+ return null;
+ }
+ final Bits liveDocs = reader.getLiveDocs();
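+ // no deleted documents in this segment, so the BKD tree's global max is exact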
+ if (liveDocs == null) {
+ return converter.apply(pointValues.getMaxPackedValue());
+ }
+ int numBytes = pointValues.getBytesPerDimension();
+ final byte[] maxValue = pointValues.getMaxPackedValue();
+ final Number[] result = new Number[1];
+ pointValues.intersect(new PointValues.IntersectVisitor() {
+ @Override
+ public void visit(int docID) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void visit(int docID, byte[] packedValue) {
+ if (liveDocs.get(docID)) {
+ // we need to collect all values in this leaf (the sort is ascending) where
+ // the last live doc is guaranteed to contain the max value for the segment.
+ result[0] = converter.apply(packedValue);
+ }
+ }
+
+ @Override
+ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+ if (FutureArrays.equals(maxValue, 0, numBytes, maxPackedValue, 0, numBytes)) {
+ // we only check leaves that contain the max value for the segment.
+ return PointValues.Relation.CELL_CROSSES_QUERY;
+ } else {
+ return PointValues.Relation.CELL_OUTSIDE_QUERY;
+ }
+ }
+ });
+ return result[0];
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java
index 314e1106b37..d64987d9cde 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorFactory.java
@@ -43,13 +43,13 @@ class MaxAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, MaxAggregatorFactory> {
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
- return new MaxAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData);
+ return new MaxAggregator(name, config, null, context, parent, pipelineAggregators, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent,
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
- return new MaxAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData);
+ return new MaxAggregator(name, config, valuesSource, context, parent, pipelineAggregators, metaData);
}
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java
index ea8e160e138..df24ee7387f 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregator.java
@@ -18,13 +18,23 @@
*/
package org.elasticsearch.search.aggregations.metrics;
+import org.apache.lucene.document.LongPoint;
+import org.apache.lucene.index.IndexOptions;
+import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.PointValues;
+import org.apache.lucene.search.CollectionTerminatedException;
+import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.util.Bits;
import org.apache.lucene.search.ScoreMode;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
+import org.elasticsearch.index.mapper.DateFieldMapper;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.Aggregator;
@@ -33,29 +43,44 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource;
+import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.List;
import java.util.Map;
+import java.util.function.Function;
class MinAggregator extends NumericMetricsAggregator.SingleValue {
final ValuesSource.Numeric valuesSource;
final DocValueFormat format;
+ final String pointField;
+ final Function<byte[], Number> pointConverter;
+
DoubleArray mins;
- MinAggregator(String name, ValuesSource.Numeric valuesSource, DocValueFormat formatter,
- SearchContext context, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
- Map<String, Object> metaData) throws IOException {
+ MinAggregator(String name,
+ ValuesSourceConfig<ValuesSource.Numeric> config,
+ ValuesSource.Numeric valuesSource,
+ SearchContext context,
+ Aggregator parent,
+ List<PipelineAggregator> pipelineAggregators,
+ Map<String, Object> metaData) throws IOException {
super(name, context, parent, pipelineAggregators, metaData);
this.valuesSource = valuesSource;
if (valuesSource != null) {
mins = context.bigArrays().newDoubleArray(1, false);
mins.fill(0, mins.size(), Double.POSITIVE_INFINITY);
}
- this.format = formatter;
+ this.format = config.format();
+ this.pointConverter = getPointReaderOrNull(context, parent, config);
+ if (pointConverter != null) {
+ pointField = config.fieldContext().field();
+ } else {
+ pointField = null;
+ }
}
@Override
@@ -67,7 +92,26 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue {
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
- return LeafBucketCollector.NO_OP_COLLECTOR;
+ if (parent != null) {
+ return LeafBucketCollector.NO_OP_COLLECTOR;
+ } else {
+ // we have no parent and the values source is empty so we can skip collecting hits.
+ throw new CollectionTerminatedException();
+ }
+ }
+ if (pointConverter != null) {
+ Number segMin = findLeafMinValue(ctx.reader(), pointField, pointConverter);
+ if (segMin != null) {
+ /**
+ * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull})
+ * so the ordinal for the bucket is always 0.
+ */
+ double min = mins.get(0);
+ min = Math.min(min, segMin.doubleValue());
+ mins.set(0, min);
+ // the minimum value has been extracted, we don't need to collect hits on this segment.
+ throw new CollectionTerminatedException();
+ }
}
final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx);
@@ -117,4 +161,77 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue {
public void doClose() {
Releasables.close(mins);
}
+
+
+ /**
+ * Returns a converter for point values if early termination is applicable to
+ * the context or null
otherwise.
+ *
+ * @param context The {@link SearchContext} of the aggregation.
+ * @param parent The parent aggregator.
+ * @param config The config for the values source metric.
+ */
+ static Function<byte[], Number> getPointReaderOrNull(SearchContext context, Aggregator parent,
+ ValuesSourceConfig<ValuesSource.Numeric> config) {
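+ // point-based early termination is only safe when every document in the
+ // segment matches (match_all) and values go into a single global bucket
+ // (no parent aggregator); otherwise the BKD min/max may come from documents
+ // that are filtered out or belong to other buckets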
+ if (context.query() != null &&
+ context.query().getClass() != MatchAllDocsQuery.class) {
+ return null;
+ }
+ if (parent != null) {
+ return null;
+ }
+ if (config.fieldContext() != null && config.script() == null) {
+ MappedFieldType fieldType = config.fieldContext().fieldType();
+ if (fieldType == null || fieldType.indexOptions() == IndexOptions.NONE) {
+ return null;
+ }
+ Function<byte[], Number> converter = null;
+ if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
+ converter = ((NumberFieldMapper.NumberFieldType) fieldType)::parsePoint;
+ } else if (fieldType.getClass() == DateFieldMapper.DateFieldType.class) {
+ converter = (in) -> LongPoint.decodeDimension(in, 0);
+ }
+ return converter;
+ }
+ return null;
+ }
+
+ /**
+ * Returns the minimum value indexed in the <code>fieldName</code> field or <code>null</code>
+ * if the value cannot be inferred from the indexed {@link PointValues}.
+ */
+ static Number findLeafMinValue(LeafReader reader, String fieldName, Function<byte[], Number> converter) throws IOException {
+ final PointValues pointValues = reader.getPointValues(fieldName);
+ if (pointValues == null) {
+ return null;
+ }
+ final Bits liveDocs = reader.getLiveDocs();
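+ // no deleted documents in this segment, so the BKD tree's global min is exact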
+ if (liveDocs == null) {
+ return converter.apply(pointValues.getMinPackedValue());
+ }
+ final Number[] result = new Number[1];
+ try {
+ pointValues.intersect(new PointValues.IntersectVisitor() {
+ @Override
+ public void visit(int docID) {
+ throw new UnsupportedOperationException();
+ }
+
+ @Override
+ public void visit(int docID, byte[] packedValue) {
+ if (liveDocs.get(docID)) {
+ result[0] = converter.apply(packedValue);
+ // this is the first leaf with a live doc so the value is the minimum for this segment.
+ throw new CollectionTerminatedException();
+ }
+ }
+
+ @Override
+ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
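+ // every cell may contain the minimum; values are visited in ascending
+ // order, so the visitor above terminates at the first live doc it sees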
+ return PointValues.Relation.CELL_CROSSES_QUERY;
+ }
+ });
+ } catch (CollectionTerminatedException e) {}
+ return result[0];
+ }
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java
index d08b8199a33..240cf2ba715 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorFactory.java
@@ -43,12 +43,12 @@ class MinAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Numeric, MinAggregatorFactory> {
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {
- return new MinAggregator(name, null, config.format(), context, parent, pipelineAggregators, metaData);
+ return new MinAggregator(name, config, null, context, parent, pipelineAggregators, metaData);
}
@Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
- return new MinAggregator(name, valuesSource, config.format(), context, parent, pipelineAggregators, metaData);
+ return new MinAggregator(name, config, valuesSource, context, parent, pipelineAggregators, metaData);
}
}
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java
index 6a25c51737b..25fcebc6aa5 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregationBuilder.java
@@ -196,6 +196,14 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
protected ScriptedMetricAggregatorFactory doBuild(SearchContext context, AggregatorFactory<?> parent,
Builder subfactoriesBuilder) throws IOException {
+ if (combineScript == null) {
+ throw new IllegalArgumentException("[combineScript] must not be null: [" + name + "]");
+ }
+
+ if (reduceScript == null) {
+ throw new IllegalArgumentException("[reduceScript] must not be null: [" + name + "]");
+ }
+
QueryShardContext queryShardContext = context.getQueryShardContext();
// Extract params from scripts and pass them along to ScriptedMetricAggregatorFactory, since it won't have
@@ -215,16 +223,14 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
ScriptedMetricAggContexts.MapScript.CONTEXT);
Map<String, Object> mapScriptParams = mapScript.getParams();
+
ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript;
Map<String, Object> combineScriptParams;
- if (combineScript != null) {
- compiledCombineScript = queryShardContext.getScriptService().compile(combineScript,
- ScriptedMetricAggContexts.CombineScript.CONTEXT);
- combineScriptParams = combineScript.getParams();
- } else {
- compiledCombineScript = (p, a) -> null;
- combineScriptParams = Collections.emptyMap();
- }
+
+ compiledCombineScript = queryShardContext.getScriptService().compile(combineScript,
+ ScriptedMetricAggContexts.CombineScript.CONTEXT);
+ combineScriptParams = combineScript.getParams();
+
return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript,
initScriptParams, compiledCombineScript, combineScriptParams, reduceScript,
params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData);
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java
index 28d82f4cafd..6faf6a5d58c 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSourceAggregatorFactory.java
@@ -45,6 +45,10 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource, AF extends ValuesSourceAggregatorFactory<VS, AF>>
+ public ValuesSourceConfig<VS> getConfig() {
+ return config;
+ }
+
@Override
public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
index 69ac9049686..1b4cbbbd882 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java
@@ -30,6 +30,7 @@ import org.apache.lucene.search.TotalHits.Relation;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet;
import org.elasticsearch.ExceptionsHelper;
+import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.document.DocumentField;
@@ -38,6 +39,7 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.DocumentMapper;
@@ -344,6 +346,7 @@ public class FetchPhase implements SearchPhase {
ObjectMapper current = nestedObjectMapper;
String originalName = nestedObjectMapper.name();
SearchHit.NestedIdentity nestedIdentity = null;
+ final IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings();
do {
Query parentFilter;
nestedParentObjectMapper = current.getParentObjectMapper(mapperService);
@@ -374,12 +377,32 @@ public class FetchPhase implements SearchPhase {
BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext);
int offset = 0;
- int nextParent = parentBits.nextSetBit(currentParent);
- for (int docId = childIter.advance(currentParent + 1); docId < nextParent && docId != DocIdSetIterator.NO_MORE_DOCS;
- docId = childIter.nextDoc()) {
- offset++;
+ if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_5_0)) {
+ /**
+ * Starts from the previous parent and finds the offset of the
+ * <code>nestedSubDocId</code> within the nested children. Nested documents
+ * are indexed in the same order as in the source array, so the offset
+ * of a nested child is the number of nested documents with the same parent
+ * that appear before it.
+ */
+ int previousParent = parentBits.prevSetBit(currentParent);
+ for (int docId = childIter.advance(previousParent + 1); docId < nestedSubDocId && docId != DocIdSetIterator.NO_MORE_DOCS;
+ docId = childIter.nextDoc()) {
+ offset++;
+ }
+ currentParent = nestedSubDocId;
+ } else {
+ /**
+ * Nested documents are in reverse order in this version so we start from the current nested document
+ * and find the number of documents with the same parent that appear after it.
+ */
+ int nextParent = parentBits.nextSetBit(currentParent);
+ for (int docId = childIter.advance(currentParent + 1); docId < nextParent && docId != DocIdSetIterator.NO_MORE_DOCS;
+ docId = childIter.nextDoc()) {
+ offset++;
+ }
+ currentParent = nextParent;
}
- currentParent = nextParent;
current = nestedObjectMapper = nestedParentObjectMapper;
int currentPrefix = current == null ? 0 : current.name().length() + 1;
nestedIdentity = new SearchHit.NestedIdentity(originalName.substring(currentPrefix), offset, nestedIdentity);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java
index 5eb102208eb..c24b023a4d5 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java
@@ -389,28 +389,28 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertEquals(6, doc.docs().size());
Document nested = doc.docs().get(0);
- assertFieldValue(nested, "n1.n2.target", 7L);
+ assertFieldValue(nested, "n1.n2.target", 3L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");
- nested = doc.docs().get(2);
+ nested = doc.docs().get(1);
assertFieldValue(nested, "n1.n2.target", 5L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");
nested = doc.docs().get(3);
- assertFieldValue(nested, "n1.n2.target", 3L);
+ assertFieldValue(nested, "n1.n2.target", 7L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");
- Document parent = doc.docs().get(1);
+ Document parent = doc.docs().get(2);
assertFieldValue(parent, "target");
- assertFieldValue(parent, "n1.target", 7L);
+ assertFieldValue(parent, "n1.target", 3L, 5L);
assertFieldValue(parent, "n1.n2.target");
parent = doc.docs().get(4);
assertFieldValue(parent, "target");
- assertFieldValue(parent, "n1.target", 3L, 5L);
+ assertFieldValue(parent, "n1.target", 7L);
assertFieldValue(parent, "n1.n2.target");
Document root = doc.docs().get(5);
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
index 9319e6d3b6e..6be0894186a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NestedObjectMapperTests.java
@@ -21,6 +21,8 @@ package org.elasticsearch.index.mapper;
import java.util.HashSet;
import org.apache.lucene.index.IndexableField;
+import org.elasticsearch.Version;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
@@ -33,6 +35,7 @@ import org.elasticsearch.index.mapper.ObjectMapper.Dynamic;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
+import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.io.UncheckedIOException;
@@ -120,11 +123,11 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
assertThat(doc.docs().size(), equalTo(3));
assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
- assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("3"));
- assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("4"));
+ assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
assertThat(doc.docs().get(1).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
- assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("1"));
- assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("2"));
+ assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
+ assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
assertThat(doc.docs().get(2).get("field"), equalTo("value"));
}
@@ -160,20 +163,20 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7));
- assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6"));
+ assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue());
- assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("5"));
+ assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue());
- assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("4"));
+ assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(2).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(2).get("field"), nullValue());
- assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("3"));
+ assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue());
- assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("2"));
+ assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue());
- assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(5).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value"));
@@ -212,21 +215,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7));
- assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6"));
+ assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue());
- assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("5"));
+ assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue());
- assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("4"));
- assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("5"));
+ assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(2).get("field"), nullValue());
- assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("3"));
+ assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue());
- assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("2"));
+ assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue());
- assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("1"));
- assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("2"));
+ assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
+ assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value"));
assertThat(doc.docs().get(6).get("nested1.field1"), nullValue());
@@ -264,21 +267,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7));
- assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6"));
+ assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue());
- assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("5"));
+ assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue());
- assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("4"));
- assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("5"));
+ assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(2).get("field"), nullValue());
- assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("3"));
+ assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue());
- assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("2"));
+ assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue());
- assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("1"));
- assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("2"));
+ assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
+ assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value"));
assertThat(doc.docs().get(6).getFields("nested1.field1").length, equalTo(2));
@@ -316,20 +319,20 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7));
- assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6"));
+ assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue());
- assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("5"));
+ assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue());
- assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("4"));
+ assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(2).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(2).get("field"), nullValue());
- assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("3"));
+ assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue());
- assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("2"));
+ assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue());
- assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(5).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value"));
@@ -424,9 +427,9 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON));
assertThat(doc.docs().size(), equalTo(3));
- assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("4"));
+ assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(0).get("field"), nullValue());
- assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(1).get("field"), nullValue());
assertThat(doc.docs().get(2).get("field"), equalTo("value"));
}
@@ -634,4 +637,63 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
);
}
+ @Override
+ protected boolean forbidPrivateIndexSettings() {
+ /**
+ * This is needed to force the index version with {@link IndexMetaData#SETTING_INDEX_VERSION_CREATED}.
+ */
+ return false;
+ }
+
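+ // Nested documents were written in reverse order before 6.5, so the expected order of
+ // the child documents depends on the index version that created them.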
+ public void testReorderParentBWC() throws IOException {
+ String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
+ .startObject("nested1").field("type", "nested").endObject()
+ .endObject().endObject().endObject());
+
+ Version bwcVersion = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
+ VersionUtils.getPreviousVersion(Version.V_6_5_0));
+ for (Version version : new Version[] {Version.V_6_5_0, bwcVersion}) {
+ DocumentMapper docMapper = createIndex("test-" + version,
+ Settings.builder().put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), version).build())
+ .mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
+
+ assertThat(docMapper.hasNestedObjects(), equalTo(true));
+ ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
+ assertThat(nested1Mapper.nested().isNested(), equalTo(true));
+
+ ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
+ .bytes(XContentFactory.jsonBuilder()
+ .startObject()
+ .field("field", "value")
+ .startArray("nested1")
+ .startObject()
+ .field("field1", "1")
+ .field("field2", "2")
+ .endObject()
+ .startObject()
+ .field("field1", "3")
+ .field("field2", "4")
+ .endObject()
+ .endArray()
+ .endObject()),
+ XContentType.JSON));
+
+ assertThat(doc.docs().size(), equalTo(3));
+ if (version.onOrAfter(Version.V_6_5_0)) {
+ assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
+ assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
+ assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
+ assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
+ assertThat(doc.docs().get(2).get("field"), equalTo("value"));
+ } else {
+ assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
+ assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("3"));
+ assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("4"));
+ assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("1"));
+ assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("2"));
+ assertThat(doc.docs().get(2).get("field"), equalTo("value"));
+ }
+ }
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
index 4b2967553e5..28f1280382a 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.IntPoint;
@@ -53,6 +54,7 @@ import java.util.List;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
public class NumberFieldTypeTests extends FieldTypeTestCase {
@@ -530,4 +532,49 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
assertEquals(Double.valueOf(1.2),
new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE).valueForDisplay(1.2));
}
+
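+ // Encodes one value per numeric type with the corresponding Lucene point encoding and
+ // checks that parsePoint decodes the original value.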
+ public void testParsePoint() {
+ {
+ byte[] bytes = new byte[Integer.BYTES];
+ byte value = randomByte();
+ IntPoint.encodeDimension(value, bytes, 0);
+ assertThat(NumberType.BYTE.parsePoint(bytes), equalTo(value));
+ }
+ {
+ byte[] bytes = new byte[Integer.BYTES];
+ short value = randomShort();
+ IntPoint.encodeDimension(value, bytes, 0);
+ assertThat(NumberType.SHORT.parsePoint(bytes), equalTo(value));
+ }
+ {
+ byte[] bytes = new byte[Integer.BYTES];
+ int value = randomInt();
+ IntPoint.encodeDimension(value, bytes, 0);
+ assertThat(NumberType.INTEGER.parsePoint(bytes), equalTo(value));
+ }
+ {
+ byte[] bytes = new byte[Long.BYTES];
+ long value = randomLong();
+ LongPoint.encodeDimension(value, bytes, 0);
+ assertThat(NumberType.LONG.parsePoint(bytes), equalTo(value));
+ }
+ {
+ byte[] bytes = new byte[Float.BYTES];
+ float value = randomFloat();
+ FloatPoint.encodeDimension(value, bytes, 0);
+ assertThat(NumberType.FLOAT.parsePoint(bytes), equalTo(value));
+ }
+ {
+ byte[] bytes = new byte[Double.BYTES];
+ double value = randomDouble();
+ DoublePoint.encodeDimension(value, bytes, 0);
+ assertThat(NumberType.DOUBLE.parsePoint(bytes), equalTo(value));
+ }
+ {
+ byte[] bytes = new byte[Float.BYTES];
+ float value = 3f;
+ HalfFloatPoint.encodeDimension(value, bytes, 0);
+ assertThat(NumberType.HALF_FLOAT.parsePoint(bytes), equalTo(value));
+ }
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
index 3f6feb23286..809d6475274 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java
@@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
+import org.elasticsearch.test.junit.annotations.TestLogging;
import org.joda.time.DateTimeZone;
import java.time.ZoneOffset;
@@ -49,6 +50,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
+@TestLogging(value = "org.elasticsearch.indices.IndicesRequestCache:TRACE")
public class IndicesRequestCacheIT extends ESIntegTestCase {
// One of the primary purposes of the query cache is to cache aggs results
@@ -417,8 +419,8 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.getRequestCache();
// Check the hit count and miss count together so if they are not
// correct we can see both values
- assertEquals(Arrays.asList(expectedHits, expectedMisses),
- Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount()));
+ assertEquals(Arrays.asList(expectedHits, expectedMisses, 0L),
+ Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount(), requestCacheStats.getEvictions()));
}
}
diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
index 4418a7cfb7f..08bf43b91bb 100644
--- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheTests.java
@@ -31,7 +31,6 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@@ -39,6 +38,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.cache.request.ShardRequestCache;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.shard.ShardId;
@@ -68,7 +68,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0);
- BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes);
+ BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
@@ -79,7 +79,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache hit
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
- value = cache.getOrCompute(entity, loader, reader, termBytes);
+ value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
@@ -126,7 +126,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0);
- BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes);
+ BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
@@ -140,7 +140,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache the second
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(secondReader, 0);
- value = cache.getOrCompute(entity, loader, secondReader, termBytes);
+ value = cache.getOrCompute(entity, loader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
@@ -152,7 +152,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(secondReader, 0);
- value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes);
+ value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
@@ -162,7 +162,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
- value = cache.getOrCompute(entity, loader, reader, termBytes);
+ value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString());
assertEquals(2, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
@@ -222,9 +222,9 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
Loader secondLoader = new Loader(secondReader, 0);
- BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes);
+ BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value1.streamInput().readString());
- BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes);
+ BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value2.streamInput().readString());
size = requestCacheStats.stats().getMemorySize();
IOUtils.close(reader, secondReader, writer, dir, cache);
@@ -257,12 +257,12 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard);
Loader thirdLoader = new Loader(thirdReader, 0);
- BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes);
+ BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value1.streamInput().readString());
- BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes);
+ BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value2.streamInput().readString());
logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
- BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes);
+ BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes, () -> termQuery.toString());
assertEquals("baz", value3.streamInput().readString());
assertEquals(2, cache.count());
assertEquals(1, requestCacheStats.stats().getEvictions());
@@ -298,12 +298,12 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity);
Loader thirdLoader = new Loader(thirdReader, 0);
- BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes);
+ BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value1.streamInput().readString());
- BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes);
+ BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value2.streamInput().readString());
logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
- BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes);
+ BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes, () -> termQuery.toString());
assertEquals("baz", value3.streamInput().readString());
assertEquals(3, cache.count());
final long hitCount = requestCacheStats.stats().getHitCount();
@@ -312,7 +312,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
cache.cleanCache();
assertEquals(1, cache.count());
// third has not been validated since it's a different identity
- value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes);
+ value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes, () -> termQuery.toString());
assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount());
assertEquals("baz", value3.streamInput().readString());
@@ -371,7 +371,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0);
- BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes);
+ BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
@@ -382,7 +382,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache hit
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
- value = cache.getOrCompute(entity, loader, reader, termBytes);
+ value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount());
@@ -396,7 +396,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0);
cache.invalidate(entity, reader, termBytes);
- value = cache.getOrCompute(entity, loader, reader, termBytes);
+ value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount());
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
index b27d33aa5ca..013f951cc3f 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxAggregatorTests.java
@@ -19,27 +19,49 @@
package org.elasticsearch.search.aggregations.metrics;
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
+import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.Bits;
+import org.apache.lucene.util.FutureArrays;
import org.elasticsearch.common.CheckedConsumer;
+import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.function.Supplier;
import static java.util.Collections.singleton;
+import static org.hamcrest.Matchers.equalTo;
public class MaxAggregatorTests extends AggregatorTestCase {
public void testNoDocs() throws IOException {
@@ -77,7 +99,6 @@ public class MaxAggregatorTests extends AggregatorTestCase {
});
}
-
public void testQueryFiltering() throws IOException {
testCase(IntPoint.newRangeQuery("number", 0, 5), iw -> {
iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7)));
@@ -96,8 +117,9 @@ public class MaxAggregatorTests extends AggregatorTestCase {
});
}
- private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> buildIndex, Consumer<InternalMax> verify)
- throws IOException {
+ private void testCase(Query query,
+ CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
+ Consumer<InternalMax> verify) throws IOException {
Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
buildIndex.accept(indexWriter);
@@ -107,10 +129,10 @@ public class MaxAggregatorTests extends AggregatorTestCase {
IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
- MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
+ MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
fieldType.setName("number");
- MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
+ MaxAggregator aggregator = createAggregator(query, aggregationBuilder, indexSearcher, createIndexSettings(), fieldType);
aggregator.preCollection();
indexSearcher.search(query, aggregator);
aggregator.postCollection();
@@ -119,4 +141,110 @@ public class MaxAggregatorTests extends AggregatorTestCase {
indexReader.close();
directory.close();
}
+
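+ // Exercises the points-based max shortcut against indexes of random long, int, float
+ // and double point values.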
+ public void testMaxShortcutRandom() throws Exception {
+ testMaxShortcutCase(
+ () -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE),
+ (n) -> new LongPoint("number", n.longValue()),
+ (v) -> LongPoint.decodeDimension(v, 0));
+
+ testMaxShortcutCase(
+ () -> randomInt(),
+ (n) -> new IntPoint("number", n.intValue()),
+ (v) -> IntPoint.decodeDimension(v, 0));
+
+ testMaxShortcutCase(
+ () -> randomFloat(),
+ (n) -> new FloatPoint("number", n.floatValue()),
+ (v) -> FloatPoint.decodeDimension(v, 0));
+
+ testMaxShortcutCase(
+ () -> randomDouble(),
+ (n) -> new DoublePoint("number", n.doubleValue()),
+ (v) -> DoublePoint.decodeDimension(v, 0));
+ }
+
+ private void testMaxShortcutCase(Supplier<Number> randomNumber,
+ Function<Number, Field> pointFieldFunc,
+ Function<byte[], Number> pointConvertFunc) throws IOException {
+ Directory directory = newDirectory();
+ IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
+ IndexWriter indexWriter = new IndexWriter(directory, config);
+ List<Document> documents = new ArrayList<>();
+ List<Tuple<Integer, Number>> values = new ArrayList<>();
+ int numValues = atLeast(50);
+ int docID = 0;
+ for (int i = 0; i < numValues; i++) {
+ int numDup = randomIntBetween(1, 3);
+ for (int j = 0; j < numDup; j++) {
+ Document document = new Document();
+ Number nextValue = randomNumber.get();
+ values.add(new Tuple<>(docID, nextValue));
+ document.add(new StringField("id", Integer.toString(docID), Field.Store.NO));
+ document.add(pointFieldFunc.apply(nextValue));
+ documents.add(document);
+ docID++;
+ }
+ }
+ // insert some documents without a value for the metric field.
+ for (int i = 0; i < 3; i++) {
+ Document document = new Document();
+ documents.add(document);
+ }
+ indexWriter.addDocuments(documents);
+ Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue()));
+ try (IndexReader reader = DirectoryReader.open(indexWriter)) {
+ LeafReaderContext ctx = reader.leaves().get(0);
+ Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc);
+ assertThat(res, equalTo(values.get(values.size()-1).v2()));
+ }
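+ // Delete documents from the largest value down: the shortcut must either return the
+ // next live maximum or null when only deleted documents hold the max leaf values.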
+ for (int i = values.size()-1; i > 0; i--) {
+ indexWriter.deleteDocuments(new Term("id", values.get(i).v1().toString()));
+ try (IndexReader reader = DirectoryReader.open(indexWriter)) {
+ LeafReaderContext ctx = reader.leaves().get(0);
+ Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc);
+ if (res != null) {
+ assertThat(res, equalTo(values.get(i - 1).v2()));
+ } else {
+ assertAllDeleted(ctx.reader().getLiveDocs(), ctx.reader().getPointValues("number"));
+ }
+ }
+ }
+ indexWriter.deleteDocuments(new Term("id", values.get(0).v1().toString()));
+ try (IndexReader reader = DirectoryReader.open(indexWriter)) {
+ LeafReaderContext ctx = reader.leaves().get(0);
+ Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc);
+ assertThat(res, equalTo(null));
+ }
+ indexWriter.close();
+ directory.close();
+ }
+
+ // checks that documents inside the max leaves are all deleted
+ private void assertAllDeleted(Bits liveDocs, PointValues values) throws IOException {
+ final byte[] maxValue = values.getMaxPackedValue();
+ int numBytes = values.getBytesPerDimension();
+ final boolean[] seen = new boolean[1];
+ values.intersect(new PointValues.IntersectVisitor() {
+ @Override
+ public void visit(int docID) {
+ throw new AssertionError();
+ }
+
+ @Override
+ public void visit(int docID, byte[] packedValue) {
+ assertFalse(liveDocs.get(docID));
+ seen[0] = true;
+ }
+
+ @Override
+ public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+ if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) {
+ return PointValues.Relation.CELL_CROSSES_QUERY;
+ }
+ return PointValues.Relation.CELL_OUTSIDE_QUERY;
+ }
+ });
+ assertTrue(seen[0]);
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
index 5447406f2f2..9c46c1db7ea 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MaxIT.java
@@ -40,6 +40,7 @@ import java.util.Map;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
@@ -392,4 +393,22 @@ public class MaxIT extends AbstractNumericTestCase {
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L));
}
+
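+ // With track_total_hits disabled the max aggregation may terminate collection early;
+ // the sibling value_count must still see every document.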
+ public void testEarlyTermination() throws Exception {
+ SearchResponse searchResponse = client().prepareSearch("idx")
+ .setTrackTotalHits(false)
+ .setQuery(matchAllQuery())
+ .addAggregation(max("max").field("values"))
+ .addAggregation(count("count").field("values"))
+ .execute().actionGet();
+
+ Max max = searchResponse.getAggregations().get("max");
+ assertThat(max, notNullValue());
+ assertThat(max.getName(), equalTo("max"));
+ assertThat(max.getValue(), equalTo(12.0));
+
+ ValueCount count = searchResponse.getAggregations().get("count");
+ assertThat(count.getName(), equalTo("count"));
+ assertThat(count.getValue(), equalTo(20L));
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java
index 5b279f1ea49..ad897a2ef32 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinAggregatorTests.java
@@ -16,20 +16,59 @@
* specific language governing permissions and limitations
* under the License.
*/
+
package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.document.Document;
+import org.apache.lucene.document.DoublePoint;
+import org.apache.lucene.document.Field;
+import org.apache.lucene.document.FloatPoint;
+import org.apache.lucene.document.IntPoint;
+import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField;
+import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory;
+import org.elasticsearch.common.collect.Tuple;
+import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorTestCase;
+import org.elasticsearch.search.aggregations.support.FieldContext;
+import org.elasticsearch.search.aggregations.support.ValuesSource;
+import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
+import org.elasticsearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.List;
+import java.util.function.DoubleConsumer;
+import java.util.function.Function;
+import java.util.function.Supplier;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
public class MinAggregatorTests extends AggregatorTestCase {
@@ -38,21 +77,27 @@ public class MinAggregatorTests extends AggregatorTestCase {
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
Document document = new Document();
document.add(new NumericDocValuesField("number", 9));
+ document.add(new LongPoint("number", 9));
indexWriter.addDocument(document);
document = new Document();
document.add(new NumericDocValuesField("number", 7));
+ document.add(new LongPoint("number", 7));
indexWriter.addDocument(document);
document = new Document();
document.add(new NumericDocValuesField("number", 5));
+ document.add(new LongPoint("number", 5));
indexWriter.addDocument(document);
document = new Document();
document.add(new NumericDocValuesField("number", 3));
+ document.add(new LongPoint("number", 3));
indexWriter.addDocument(document);
document = new Document();
document.add(new NumericDocValuesField("number", 1));
+ document.add(new LongPoint("number", 1));
indexWriter.addDocument(document);
document = new Document();
document.add(new NumericDocValuesField("number", -1));
+ document.add(new LongPoint("number", -1));
indexWriter.addDocument(document);
indexWriter.close();
@@ -63,6 +108,8 @@ public class MinAggregatorTests extends AggregatorTestCase {
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number");
+ testMinCase(indexSearcher, aggregationBuilder, fieldType, min -> assertEquals(-1.0d, min, 0));
+
MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
aggregator.preCollection();
indexSearcher.search(new MatchAllDocsQuery(), aggregator);
@@ -80,14 +127,20 @@ public class MinAggregatorTests extends AggregatorTestCase {
Document document = new Document();
document.add(new SortedNumericDocValuesField("number", 9));
document.add(new SortedNumericDocValuesField("number", 7));
+ document.add(new LongPoint("number", 9));
+ document.add(new LongPoint("number", 7));
indexWriter.addDocument(document);
document = new Document();
document.add(new SortedNumericDocValuesField("number", 5));
document.add(new SortedNumericDocValuesField("number", 3));
+ document.add(new LongPoint("number", 5));
+ document.add(new LongPoint("number", 3));
indexWriter.addDocument(document);
document = new Document();
document.add(new SortedNumericDocValuesField("number", 1));
document.add(new SortedNumericDocValuesField("number", -1));
+ document.add(new LongPoint("number", 1));
+ document.add(new LongPoint("number", -1));
indexWriter.addDocument(document);
indexWriter.close();
@@ -164,4 +217,207 @@ public class MinAggregatorTests extends AggregatorTestCase {
directory.close();
}
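+ // The points-based shortcut only applies to indexed numeric or date fields when there
+ // is no parent aggregator and the query is null or match_all.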
+ public void testShortcutIsApplicable() {
+ for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) {
+ assertNotNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(new MatchAllDocsQuery()),
+ null,
+ mockNumericValuesSourceConfig("number", type, true)
+ )
+ );
+ assertNotNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(null),
+ null,
+ mockNumericValuesSourceConfig("number", type, true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(null),
+ mockAggregator(),
+ mockNumericValuesSourceConfig("number", type, true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(new TermQuery(new Term("foo", "bar"))),
+ null,
+ mockNumericValuesSourceConfig("number", type, true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(null),
+ mockAggregator(),
+ mockNumericValuesSourceConfig("number", type, true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(null),
+ null,
+ mockNumericValuesSourceConfig("number", type, false)
+ )
+ );
+ }
+ assertNotNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(new MatchAllDocsQuery()),
+ null,
+ mockDateValuesSourceConfig("number", true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(new MatchAllDocsQuery()),
+ mockAggregator(),
+ mockDateValuesSourceConfig("number", true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(new TermQuery(new Term("foo", "bar"))),
+ null,
+ mockDateValuesSourceConfig("number", true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(null),
+ mockAggregator(),
+ mockDateValuesSourceConfig("number", true)
+ )
+ );
+ assertNull(
+ MinAggregator.getPointReaderOrNull(
+ mockSearchContext(null),
+ null,
+ mockDateValuesSourceConfig("number", false)
+ )
+ );
+ }
+
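+ // Exercises the points-based min shortcut against indexes of random long, int, float
+ // and double point values.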
+ public void testMinShortcutRandom() throws Exception {
+ testMinShortcutCase(
+ () -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE),
+ (n) -> new LongPoint("number", n.longValue()),
+ (v) -> LongPoint.decodeDimension(v, 0));
+
+ testMinShortcutCase(
+ () -> randomInt(),
+ (n) -> new IntPoint("number", n.intValue()),
+ (v) -> IntPoint.decodeDimension(v, 0));
+
+ testMinShortcutCase(
+ () -> randomFloat(),
+ (n) -> new FloatPoint("number", n.floatValue()),
+ (v) -> FloatPoint.decodeDimension(v, 0));
+
+ testMinShortcutCase(
+ () -> randomDouble(),
+ (n) -> new DoublePoint("number", n.doubleValue()),
+ (v) -> DoublePoint.decodeDimension(v, 0));
+ }
+
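+ // Creates the aggregator under both a match_all and an exists query and applies the
+ // same assertion to the computed minimum.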
+ private void testMinCase(IndexSearcher searcher,
+ AggregationBuilder aggregationBuilder,
+ MappedFieldType ft,
+ DoubleConsumer testResult) throws IOException {
+ Collection<Query> queries = Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(ft.name()));
+ for (Query query : queries) {
+ MinAggregator aggregator = createAggregator(query, aggregationBuilder, searcher, createIndexSettings(), ft);
+ aggregator.preCollection();
+ searcher.search(new MatchAllDocsQuery(), aggregator);
+ aggregator.postCollection();
+ InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
+ testResult.accept(result.getValue());
+ }
+ }
+
+ private void testMinShortcutCase(Supplier<Number> randomNumber,
+ Function<Number, Field> pointFieldFunc,
+ Function<byte[], Number> pointConvertFunc) throws IOException {
+ Directory directory = newDirectory();
+ IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
+ IndexWriter indexWriter = new IndexWriter(directory, config);
+ List<Document> documents = new ArrayList<>();
+ List<Tuple<Integer, Number>> values = new ArrayList<>();
+ int numValues = atLeast(50);
+ int docID = 0;
+ for (int i = 0; i < numValues; i++) {
+ int numDup = randomIntBetween(1, 3);
+ for (int j = 0; j < numDup; j++) {
+ Document document = new Document();
+ Number nextValue = randomNumber.get();
+ values.add(new Tuple<>(docID, nextValue));
+ document.add(new StringField("id", Integer.toString(docID), Field.Store.NO));
+ document.add(pointFieldFunc.apply(nextValue));
+ document.add(pointFieldFunc.apply(nextValue));
+ documents.add(document);
+ docID++;
+ }
+ }
+ // insert some documents without a value for the metric field.
+ for (int i = 0; i < 3; i++) {
+ Document document = new Document();
+ documents.add(document);
+ }
+ indexWriter.addDocuments(documents);
+ Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue()));
+ try (IndexReader reader = DirectoryReader.open(indexWriter)) {
+ LeafReaderContext ctx = reader.leaves().get(0);
+ Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
+ assertThat(res, equalTo(values.get(0).v2()));
+ }
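+ // Delete documents from the smallest value up and check that the shortcut reports the
+ // next live minimum.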
+ for (int i = 1; i < values.size(); i++) {
+ indexWriter.deleteDocuments(new Term("id", values.get(i-1).v1().toString()));
+ try (IndexReader reader = DirectoryReader.open(indexWriter)) {
+ LeafReaderContext ctx = reader.leaves().get(0);
+ Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
+ assertThat(res, equalTo(values.get(i).v2()));
+ }
+ }
+ indexWriter.deleteDocuments(new Term("id", values.get(values.size()-1).v1().toString()));
+ try (IndexReader reader = DirectoryReader.open(indexWriter)) {
+ LeafReaderContext ctx = reader.leaves().get(0);
+ Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
+ assertThat(res, equalTo(null));
+ }
+ indexWriter.close();
+ directory.close();
+ }
+
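+ // Minimal mocks for testShortcutIsApplicable: a SearchContext that only answers
+ // query(), and ValuesSourceConfigs whose fieldContext() is backed by a numeric or
+ // date field type.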
+ private SearchContext mockSearchContext(Query query) {
+ SearchContext searchContext = mock(SearchContext.class);
+ when(searchContext.query()).thenReturn(query);
+ return searchContext;
+ }
+
+ private Aggregator mockAggregator() {
+ return mock(Aggregator.class);
+ }
+
+ private ValuesSourceConfig<ValuesSource.Numeric> mockNumericValuesSourceConfig(String fieldName,
+ NumberFieldMapper.NumberType numType,
+ boolean indexed) {
+ ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class);
+ MappedFieldType ft = new NumberFieldMapper.NumberFieldType(numType);
+ ft.setName(fieldName);
+ ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE);
+ ft.freeze();
+ when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft));
+ return config;
+ }
+
+ private ValuesSourceConfig<ValuesSource.Numeric> mockDateValuesSourceConfig(String fieldName, boolean indexed) {
+ ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class);
+ MappedFieldType ft = new DateFieldMapper.Builder(fieldName).fieldType();
+ ft.setName(fieldName);
+ ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE);
+ ft.freeze();
+ when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft));
+ return config;
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
index d92d212f4d2..7bb0d23c4c2 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/MinIT.java
@@ -40,6 +40,7 @@ import java.util.Map;
import static java.util.Collections.emptyMap;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
+import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
@@ -404,4 +405,22 @@ public class MinIT extends AbstractNumericTestCase {
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L));
}
+
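+ // With track_total_hits disabled the min aggregation may terminate collection early;
+ // the sibling value_count must still see every document.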
+ public void testEarlyTermination() throws Exception {
+ SearchResponse searchResponse = client().prepareSearch("idx")
+ .setTrackTotalHits(false)
+ .setQuery(matchAllQuery())
+ .addAggregation(min("min").field("values"))
+ .addAggregation(count("count").field("values"))
+ .execute().actionGet();
+
+ Min min = searchResponse.getAggregations().get("min");
+ assertThat(min, notNullValue());
+ assertThat(min.getName(), equalTo("min"));
+ assertThat(min.getValue(), equalTo(2.0));
+
+ ValueCount count = searchResponse.getAggregations().get("count");
+ assertThat(count.getName(), equalTo("count"));
+ assertThat(count.getValue(), equalTo(20L));
+ }
}
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
index 34ef4b9b93c..05115a03e30 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricAggregatorTests.java
@@ -54,6 +54,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap());
private static final Script COMBINE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript",
Collections.emptyMap());
+ private static final Script REDUCE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "reduceScript",
+ Collections.emptyMap());
private static final Script INIT_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptScore",
Collections.emptyMap());
@@ -61,6 +63,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
Collections.emptyMap());
private static final Script COMBINE_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptScore",
Collections.emptyMap());
+ private static final Script COMBINE_SCRIPT_NOOP = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptNoop",
+ Collections.emptyMap());
private static final Script INIT_SCRIPT_PARAMS = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptParams",
Collections.singletonMap("initialValue", 24));
@@ -96,6 +100,14 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
Map state = (Map) params.get("state");
return ((List) state.get("collector")).stream().mapToInt(Integer::intValue).sum();
});
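+ // no-op combine and reduce scripts that pass their state through unchanged, for tests
+ // that only exercise the map phase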
+ SCRIPTS.put("combineScriptNoop", params -> {
+ Map state = (Map) params.get("state");
+ return state;
+ });
+ SCRIPTS.put("reduceScript", params -> {
+ Map state = (Map) params.get("state");
+ return state;
+ });
SCRIPTS.put("initScriptScore", params -> {
Map state = (Map) params.get("state");
@@ -160,7 +172,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.mapScript(MAP_SCRIPT); // map script is mandatory, even if its not used in this case
+ aggregationBuilder.mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_NOOP).reduceScript(REDUCE_SCRIPT);
ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
assertEquals(AGG_NAME, scriptedMetric.getName());
assertNotNull(scriptedMetric.aggregation());
@@ -169,9 +181,6 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
}
- /**
- * without combine script, the "states" map should contain a list of the size of the number of documents matched
- */
public void testScriptedMetricWithoutCombine() throws IOException {
try (Directory directory = newDirectory()) {
int numDocs = randomInt(100);
@@ -182,15 +191,28 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT);
- ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
- assertEquals(AGG_NAME, scriptedMetric.getName());
- assertNotNull(scriptedMetric.aggregation());
- @SuppressWarnings("unchecked")
- Map<String, Object> agg = (Map<String, Object>) scriptedMetric.aggregation();
- @SuppressWarnings("unchecked")
- List list = (List) agg.get("collector");
- assertEquals(numDocs, list.size());
+ aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).reduceScript(REDUCE_SCRIPT);
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
+ () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+ assertEquals(exception.getMessage(), "[combineScript] must not be null: [scriptedMetric]");
+ }
+ }
+ }
+
+ public void testScriptedMetricWithoutReduce() throws IOException {
+ try (Directory directory = newDirectory()) {
+ int numDocs = randomInt(100);
+ try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
+ for (int i = 0; i < numDocs; i++) {
+ indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
+ }
+ }
+ try (IndexReader indexReader = DirectoryReader.open(directory)) {
+ ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
+ aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
+ IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
+ () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+ assertEquals(exception.getMessage(), "[reduceScript] must not be null: [scriptedMetric]");
}
}
}
@@ -208,7 +230,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
+ aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT)
+ .combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT);
ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
assertEquals(AGG_NAME, scriptedMetric.getName());
assertNotNull(scriptedMetric.aggregation());
@@ -230,7 +253,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.initScript(INIT_SCRIPT_SCORE).mapScript(MAP_SCRIPT_SCORE).combineScript(COMBINE_SCRIPT_SCORE);
+ aggregationBuilder.initScript(INIT_SCRIPT_SCORE).mapScript(MAP_SCRIPT_SCORE)
+ .combineScript(COMBINE_SCRIPT_SCORE).reduceScript(REDUCE_SCRIPT);
ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
assertEquals(AGG_NAME, scriptedMetric.getName());
assertNotNull(scriptedMetric.aggregation());
@@ -250,7 +274,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS).combineScript(COMBINE_SCRIPT_PARAMS);
+ aggregationBuilder.initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS)
+ .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
// The result value depends on the script params.
@@ -270,8 +295,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
Map<String, Object> aggParams = Collections.singletonMap(CONFLICTING_PARAM_NAME, "blah");
- aggregationBuilder.params(aggParams).initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS).
- combineScript(COMBINE_SCRIPT_PARAMS);
+ aggregationBuilder.params(aggParams).initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS)
+ .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
@@ -289,7 +314,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.initScript(INIT_SCRIPT_SELF_REF).mapScript(MAP_SCRIPT);
+ aggregationBuilder.initScript(INIT_SCRIPT_SELF_REF).mapScript(MAP_SCRIPT)
+ .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
@@ -309,7 +335,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT_SELF_REF);
+ aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT_SELF_REF)
+ .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
@@ -326,7 +353,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
}
try (IndexReader indexReader = DirectoryReader.open(directory)) {
ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
- aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_SELF_REF);
+ aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT)
+ .combineScript(COMBINE_SCRIPT_SELF_REF).reduceScript(REDUCE_SCRIPT);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java
index 2643b6c6166..3c3fcddc32e 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java
@@ -153,6 +153,14 @@ public class ScriptedMetricIT extends ESIntegTestCase {
return newAggregation;
});
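+ // no-op combine ("state") and reduce ("states") scripts that return their input
+ // unchanged, for tests that only exercise the init and map phases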
+ scripts.put("no-op aggregation", vars -> {
+ return (Map) vars.get("state");
+ });
+
+ scripts.put("no-op list aggregation", vars -> {
+ return (List>) vars.get("states");
+ });
+
// Equivalent to:
//
// newaggregation = [];
@@ -188,6 +196,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
Integer sum = 0;
List> states = (List>) vars.get("states");
+
+ if (states == null) {
+ return newAggregation;
+ }
+
for (Map<String, Object> state : states) {
List> list = (List>) state.get("list");
if (list != null) {
@@ -328,10 +341,14 @@ public class ScriptedMetricIT extends ESIntegTestCase {
public void testMap() {
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap());
+ Script combineScript =
+ new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+ Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op list aggregation", Collections.emptyMap());
SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery())
- .addAggregation(scriptedMetric("scripted").mapScript(mapScript))
+ .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript))
.get();
assertSearchResponse(response);
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -369,10 +386,18 @@ public class ScriptedMetricIT extends ESIntegTestCase {
Map<String, Object> aggregationParams = Collections.singletonMap("param2", 1);
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state[param1] = param2", scriptParams);
+ Script combineScript =
+ new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+ Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op list aggregation", Collections.emptyMap());
SearchResponse response = client().prepareSearch("idx")
.setQuery(matchAllQuery())
- .addAggregation(scriptedMetric("scripted").params(aggregationParams).mapScript(mapScript))
+ .addAggregation(scriptedMetric("scripted")
+ .params(aggregationParams)
+ .mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript))
.get();
assertSearchResponse(response);
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -423,7 +448,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
.initScript(
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()))
.mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
- "state.list.add(vars.multiplier)", Collections.emptyMap())))
+ "state.list.add(vars.multiplier)", Collections.emptyMap()))
+ .combineScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op aggregation", Collections.emptyMap()))
+ .reduceScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op list aggregation", Collections.emptyMap())))
.get();
assertSearchResponse(response);
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -466,6 +495,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", Collections.emptyMap());
Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
+ Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op list aggregation", Collections.emptyMap());
SearchResponse response = client()
.prepareSearch("idx")
@@ -474,7 +505,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
scriptedMetric("scripted")
.params(params)
.mapScript(mapScript)
- .combineScript(combineScript))
+ .combineScript(combineScript)
+ .reduceScript(reduceScript))
.execute().actionGet();
assertSearchResponse(response);
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -519,6 +551,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
Collections.emptyMap());
Script combineScript =
new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
+ Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op list aggregation", Collections.emptyMap());
SearchResponse response = client()
.prepareSearch("idx")
@@ -528,7 +562,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
.params(params)
.initScript(initScript)
.mapScript(mapScript)
- .combineScript(combineScript))
+ .combineScript(combineScript)
+ .reduceScript(reduceScript))
.get();
assertSearchResponse(response);
assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -713,6 +748,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
+ Script combineScript =
+ new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
"sum all states' state.list values as a new aggregation", Collections.emptyMap());
@@ -724,6 +761,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
.params(params)
.initScript(initScript)
.mapScript(mapScript)
+ .combineScript(combineScript)
.reduceScript(reduceScript))
.get();
assertSearchResponse(response);
@@ -752,6 +790,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
Collections.emptyMap());
+ Script combineScript =
+ new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
"sum all states' state.list values as a new aggregation", Collections.emptyMap());
@@ -762,6 +802,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
scriptedMetric("scripted")
.params(params)
.mapScript(mapScript)
+ .combineScript(combineScript)
.reduceScript(reduceScript))
.get();
assertSearchResponse(response);
@@ -980,6 +1021,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
*/
public void testDontCacheScripts() throws Exception {
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap());
+ Script combineScript =
+ new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+ Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op list aggregation", Collections.emptyMap());
+
assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long")
.setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
.get());
@@ -994,7 +1040,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
// Test that a request using a script does not get cached
SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0)
- .addAggregation(scriptedMetric("foo").mapScript(mapScript)).get();
+ .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)).get();
assertSearchResponse(r);
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
@@ -1006,10 +1052,17 @@ public class ScriptedMetricIT extends ESIntegTestCase {
public void testConflictingAggAndScriptParams() {
        Map<String, Object> params = Collections.singletonMap("param1", "12");
Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", params);
+ Script combineScript =
+ new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+ Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+ "no-op list aggregation", Collections.emptyMap());
SearchRequestBuilder builder = client().prepareSearch("idx")
.setQuery(matchAllQuery())
- .addAggregation(scriptedMetric("scripted").params(params).mapScript(mapScript));
+ .addAggregation(scriptedMetric("scripted")
+ .params(params).mapScript(mapScript)
+ .combineScript(combineScript)
+ .reduceScript(reduceScript));
SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get);
assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters"));
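Note: every scripted_metric in this test class now supplies explicit combine and reduce scripts instead of relying on the old implicit behavior of returning the raw shard states. A minimal sketch of the resulting builder pattern, assuming the default "painless" script language rather than the CustomScriptPlugin mock scripts used in these tests (the index name "idx", aggregation name "doc_count", and script bodies are illustrative only):

    // map: count one per document on each shard
    Script mapScript = new Script(ScriptType.INLINE, "painless",
            "state.docs = (state.docs == null ? 0 : state.docs) + 1", Collections.emptyMap());
    // combine: collapse the per-shard state to a single number
    Script combineScript = new Script(ScriptType.INLINE, "painless",
            "return state.docs", Collections.emptyMap());
    // reduce: sum the combined results from all shards
    Script reduceScript = new Script(ScriptType.INLINE, "painless",
            "long total = 0; for (def s : states) { if (s != null) total += s } return total",
            Collections.emptyMap());

    SearchResponse resp = client().prepareSearch("idx")
            .addAggregation(scriptedMetric("doc_count")
                    .mapScript(mapScript)
                    .combineScript(combineScript)
                    .reduceScript(reduceScript))
            .get();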
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
index 03fa60c6d8e..aec8a36b2ca 100644
--- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/TopHitsIT.java
@@ -826,16 +826,16 @@ public class TopHitsIT extends ESIntegTestCase {
assertThat(topReviewers.getHits().getAt(2).getId(), equalTo("1"));
assertThat(extractValue("name", topReviewers.getHits().getAt(2).getSourceAsMap()), equalTo("user c"));
assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
- assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(0));
+ assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(1));
assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
- assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getOffset(), equalTo(2));
+ assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getOffset(), equalTo(0));
assertThat(topReviewers.getHits().getAt(3).getId(), equalTo("1"));
assertThat(extractValue("name", topReviewers.getHits().getAt(3).getSourceAsMap()), equalTo("user c"));
assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getField().string(), equalTo("comments"));
- assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(1));
+ assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(0));
assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
- assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getOffset(), equalTo(0));
+ assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getOffset(), equalTo(2));
assertThat(topReviewers.getHits().getAt(4).getId(), equalTo("1"));
assertThat(extractValue("name", topReviewers.getHits().getAt(4).getSourceAsMap()), equalTo("user d"));
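Note: a hit's nested identity addresses a position in the _source arrays, so the flipped expectations above decode as "comments" offset 1 with child "reviewers" offset 0 pointing at comments[1].reviewers[0]. A short sketch of reading that identity from a hit, using the same accessors the assertions call:

    SearchHit.NestedIdentity id = topReviewers.getHits().getAt(2).getNestedIdentity();
    String path   = id.getField().string();            // "comments"
    int offset    = id.getOffset();                    // index within the comments array
    String child  = id.getChild().getField().string(); // "reviewers"
    int childOff  = id.getChild().getOffset();         // index within that comment's reviewers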
diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
index 802c343871e..ea4f1133a2b 100644
--- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
+++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/InnerHitsIT.java
@@ -208,9 +208,9 @@ public class InnerHitsIT extends ESIntegTestCase {
int size = randomIntBetween(0, numDocs);
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
boolQuery.should(nestedQuery("field1", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("a").setSize(size)
- .addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC))));
+ .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC))));
boolQuery.should(nestedQuery("field2", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("b")
- .addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)).setSize(size)));
+ .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC)).setSize(size)));
SearchResponse searchResponse = client().prepareSearch("idx")
.setQuery(boolQuery)
.setSize(numDocs)
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
index 8a02977d55c..bdabc690f76 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java
@@ -8,14 +8,17 @@ package org.elasticsearch.xpack.security.authc.support;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.stream.Collectors;
import com.unboundid.ldap.sdk.DN;
import com.unboundid.ldap.sdk.LDAPException;
@@ -51,7 +54,7 @@ public class DnRoleMapper implements UserRoleMapper {
private final Path file;
private final boolean useUnmappedGroupsAsRoles;
    private final CopyOnWriteArrayList<Runnable> listeners = new CopyOnWriteArrayList<>();
-    private volatile Map<DN, Set<String>> dnRoles;
+    private volatile Map<String, List<String>> dnRoles;
public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) {
this.config = config;
@@ -87,7 +90,7 @@ public class DnRoleMapper implements UserRoleMapper {
* logging the error and skipping/removing all mappings. This is aligned with how we handle other auto-loaded files
* in security.
*/
-    public static Map<DN, Set<String>> parseFileLenient(Path path, Logger logger, String realmType, String realmName) {
+    public static Map<String, List<String>> parseFileLenient(Path path, Logger logger, String realmType, String realmName) {
try {
return parseFile(path, logger, realmType, realmName, false);
} catch (Exception e) {
@@ -98,7 +101,7 @@ public class DnRoleMapper implements UserRoleMapper {
}
}
-    public static Map<DN, Set<String>> parseFile(Path path, Logger logger, String realmType, String realmName, boolean strict) {
+    public static Map<String, List<String>> parseFile(Path path, Logger logger, String realmType, String realmName, boolean strict) {
logger.trace("reading realm [{}/{}] role mappings file [{}]...", realmType, realmName, path.toAbsolutePath());
@@ -149,7 +152,10 @@ public class DnRoleMapper implements UserRoleMapper {
logger.debug("[{}] role mappings found in file [{}] for realm [{}/{}]", dnToRoles.size(), path.toAbsolutePath(), realmType,
realmName);
- return unmodifiableMap(dnToRoles);
+            Map<String, List<String>> normalizedMap = dnToRoles.entrySet().stream().collect(Collectors.toMap(
+ entry -> entry.getKey().toNormalizedString(),
+ entry -> Collections.unmodifiableList(new ArrayList<>(entry.getValue()))));
+ return unmodifiableMap(normalizedMap);
} catch (IOException | SettingsException e) {
throw new ElasticsearchException("could not read realm [" + realmType + "/" + realmName + "] role mappings file [" +
path.toAbsolutePath() + "]", e);
@@ -176,8 +182,9 @@ public class DnRoleMapper implements UserRoleMapper {
        Set<String> roles = new HashSet<>();
for (String groupDnString : groupDns) {
DN groupDn = dn(groupDnString);
- if (dnRoles.containsKey(groupDn)) {
- roles.addAll(dnRoles.get(groupDn));
+ String normalizedGroupDn = groupDn.toNormalizedString();
+ if (dnRoles.containsKey(normalizedGroupDn)) {
+ roles.addAll(dnRoles.get(normalizedGroupDn));
} else if (useUnmappedGroupsAsRoles) {
roles.add(relativeName(groupDn));
}
@@ -187,14 +194,14 @@ public class DnRoleMapper implements UserRoleMapper {
groupDns, file.getFileName(), config.type(), config.name());
}
- DN userDn = dn(userDnString);
-        Set<String> rolesMappedToUserDn = dnRoles.get(userDn);
+ String normalizedUserDn = dn(userDnString).toNormalizedString();
+        List<String> rolesMappedToUserDn = dnRoles.get(normalizedUserDn);
if (rolesMappedToUserDn != null) {
roles.addAll(rolesMappedToUserDn);
}
if (logger.isDebugEnabled()) {
logger.debug("the roles [{}], are mapped from the user [{}] using file [{}] for realm [{}/{}]",
- (rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, userDnString, file.getFileName(),
+ (rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, normalizedUserDn, file.getFileName(),
config.type(), config.name());
}
return roles;
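Note: the mapper now keys its lookup table by DN strings normalized with the UnboundID SDK rather than by DN objects, so differently formatted spellings of the same distinguished name resolve to the same roles. A standalone sketch of the equivalence this relies on (class name and example DNs are illustrative):

    import com.unboundid.ldap.sdk.DN;
    import com.unboundid.ldap.sdk.LDAPException;

    public class DnNormalizationDemo {
        public static void main(String[] args) throws LDAPException {
            DN mixed = new DN("CN=Avengers, OU=Marvel, O=Superheros");
            DN lower = new DN("cn=avengers,ou=marvel,o=superheros");
            // toNormalizedString() lower-cases and strips insignificant
            // whitespace, so both spellings produce the same map key.
            System.out.println(mixed.toNormalizedString());
            System.out.println(mixed.toNormalizedString()
                    .equals(lower.toNormalizedString())); // true
        }
    }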
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
index 8128b03a065..f1ab81ce3a4 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java
@@ -862,6 +862,9 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
.startObject()
.field("field2", "value2")
.endObject()
+ .startObject()
+ .array("field2", "value2", "value3")
+ .endObject()
.endArray()
.endObject())
.get();
@@ -889,6 +892,9 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getSourceAsString(),
equalTo("{\"field2\":\"value2\"}"));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getNestedIdentity().getOffset(), equalTo(1));
+ assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getSourceAsString(),
+ equalTo("{\"field2\":[\"value2\",\"value3\"]}"));
}
public void testSuggesters() throws Exception {
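Note: the new assertions above exercise inner-hit source filtering when a nested object's field holds an array rather than a scalar. A minimal sketch, using the XContentBuilder API as in the test, of the document shape being indexed:

    XContentBuilder doc = jsonBuilder()
        .startObject()
            .startArray("nested_field")
                .startObject()
                    .field("field2", "value2")            // nested offset 0: scalar value
                .endObject()
                .startObject()
                    .array("field2", "value2", "value3")  // nested offset 1: array value
                .endObject()
            .endArray()
        .endObject();
    // serializes to {"nested_field":[{"field2":"value2"},{"field2":["value2","value3"]}]}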
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java
index 263c5ee4929..83110c7a10a 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapperTests.java
@@ -200,27 +200,27 @@ public class DnRoleMapperTests extends ESTestCase {
public void testParseFile() throws Exception {
Path file = getDataPath("role_mapping.yml");
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
+        Map<String, List<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
assertThat(mappings, notNullValue());
assertThat(mappings.size(), is(3));
DN dn = new DN("cn=avengers,ou=marvel,o=superheros");
- assertThat(mappings, hasKey(dn));
-        Set<String> roles = mappings.get(dn);
+ assertThat(mappings, hasKey(dn.toNormalizedString()));
+        List<String> roles = mappings.get(dn.toNormalizedString());
assertThat(roles, notNullValue());
assertThat(roles, hasSize(2));
assertThat(roles, containsInAnyOrder("security", "avenger"));
dn = new DN("cn=shield,ou=marvel,o=superheros");
- assertThat(mappings, hasKey(dn));
- roles = mappings.get(dn);
+ assertThat(mappings, hasKey(dn.toNormalizedString()));
+ roles = mappings.get(dn.toNormalizedString());
assertThat(roles, notNullValue());
assertThat(roles, hasSize(1));
assertThat(roles, contains("security"));
dn = new DN("cn=Horatio Hornblower,ou=people,o=sevenSeas");
- assertThat(mappings, hasKey(dn));
- roles = mappings.get(dn);
+ assertThat(mappings, hasKey(dn.toNormalizedString()));
+ roles = mappings.get(dn.toNormalizedString());
assertThat(roles, notNullValue());
assertThat(roles, hasSize(1));
assertThat(roles, contains("avenger"));
@@ -230,7 +230,7 @@ public class DnRoleMapperTests extends ESTestCase {
Path file = createTempDir().resolve("foo.yaml");
Files.createFile(file);
Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
+        Map<String, List<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
assertThat(mappings, notNullValue());
assertThat(mappings.isEmpty(), is(true));
        List<String> events = CapturingLogger.output(logger.getName(), Level.DEBUG);
@@ -242,7 +242,7 @@ public class DnRoleMapperTests extends ESTestCase {
public void testParseFile_WhenFileDoesNotExist() throws Exception {
Path file = createTempDir().resolve(randomAlphaOfLength(10));
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
+        Map<String, List<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
assertThat(mappings, notNullValue());
assertThat(mappings.isEmpty(), is(true));
@@ -272,7 +272,7 @@ public class DnRoleMapperTests extends ESTestCase {
// writing in utf_16 should cause a parsing error as we try to read the file in utf_8
Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name");
+        Map<String, List<String>> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name");
assertThat(mappings, notNullValue());
assertThat(mappings.isEmpty(), is(true));
        List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);