Merge branch 'master' into zen2

David Turner 2018-10-03 22:12:56 +01:00
commit cbe1cf98c6
65 changed files with 2352 additions and 179 deletions

View File

@@ -4,5 +4,5 @@
 # build and test Elasticsearch for this branch. Valid Java versions
 # are 'java' or 'openjdk' followed by the major release number.
-ES_BUILD_JAVA=java10
+ES_BUILD_JAVA=java11
 ES_RUNTIME_JAVA=java8

View File

@@ -6,5 +6,4 @@
 # or 'openjdk' followed by the major release number.
 ES_BUILD_JAVA:
-- java10
 - java11

View File

@@ -8,5 +8,4 @@
 ES_RUNTIME_JAVA:
 - java8
 - java8fips
-- java10
 - java11

View File

@@ -1 +1 @@
-1.10
+1.11

View File

@@ -19,6 +19,8 @@
 package org.elasticsearch.client;

 import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.client.watcher.AckWatchRequest;
+import org.elasticsearch.client.watcher.AckWatchResponse;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
@@ -91,4 +93,32 @@ public final class WatcherClient {
         restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::deleteWatch, options,
             DeleteWatchResponse::fromXContent, listener, singleton(404));
     }
+
+    /**
+     * Acknowledges a watch.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-ack-watch.html">
+     * the docs</a> for more information.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @return the response
+     * @throws IOException if there is a problem sending the request or parsing back the response
+     */
+    public AckWatchResponse ackWatch(AckWatchRequest request, RequestOptions options) throws IOException {
+        return restHighLevelClient.performRequestAndParseEntity(request, WatcherRequestConverters::ackWatch, options,
+            AckWatchResponse::fromXContent, emptySet());
+    }
+
+    /**
+     * Asynchronously acknowledges a watch.
+     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/watcher-api-ack-watch.html">
+     * the docs</a> for more information.
+     * @param request the request
+     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
+     * @param listener the listener to be notified upon completion of the request
+     */
+    public void ackWatchAsync(AckWatchRequest request, RequestOptions options, ActionListener<AckWatchResponse> listener) {
+        restHighLevelClient.performRequestAsyncAndParseEntity(request, WatcherRequestConverters::ackWatch, options,
+            AckWatchResponse::fromXContent, listener, emptySet());
+    }
 }
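For illustration, a minimal usage sketch of the new methods. The host, port, watch id, and action id are assumptions for the example, not part of this change:

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.watcher.AckWatchRequest;
import org.elasticsearch.client.watcher.AckWatchResponse;
import org.elasticsearch.client.watcher.ActionStatus.AckStatus;

public class AckWatchExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical local cluster; acknowledge the "logme" action of watch "my_watch_id".
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            AckWatchRequest request = new AckWatchRequest("my_watch_id", "logme");
            AckWatchResponse response = client.watcher().ackWatch(request, RequestOptions.DEFAULT);
            // A successfully acknowledged action moves to the ACKED state.
            AckStatus.State state = response.getStatus().actionStatus("logme").ackStatus().state();
            System.out.println("ack state: " + state);
        }
    }
}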

View File

@@ -23,6 +23,7 @@ import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpPut;
 import org.apache.http.entity.ByteArrayEntity;
 import org.apache.http.entity.ContentType;
+import org.elasticsearch.client.watcher.AckWatchRequest;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
@@ -59,4 +60,17 @@ public class WatcherRequestConverters {
         Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
         return request;
     }
+
+    public static Request ackWatch(AckWatchRequest ackWatchRequest) {
+        String endpoint = new RequestConverters.EndpointBuilder()
+            .addPathPartAsIs("_xpack")
+            .addPathPartAsIs("watcher")
+            .addPathPartAsIs("watch")
+            .addPathPart(ackWatchRequest.getWatchId())
+            .addPathPartAsIs("_ack")
+            .addCommaSeparatedPathParts(ackWatchRequest.getActionIds())
+            .build();
+        Request request = new Request(HttpPut.METHOD_NAME, endpoint);
+        return request;
+    }
 }
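For illustration, a request built from new AckWatchRequest("my_watch_id", "logme", "emailme") (hypothetical ids) resolves to the endpoint:

PUT /_xpack/watcher/watch/my_watch_id/_ack/logme,emailme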

View File

@@ -0,0 +1,96 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.watcher;
import org.elasticsearch.client.Validatable;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import java.util.Locale;
/**
* A request to explicitly acknowledge a watch.
*/
public class AckWatchRequest implements Validatable {
private final String watchId;
private final String[] actionIds;
public AckWatchRequest(String watchId, String... actionIds) {
validateIds(watchId, actionIds);
this.watchId = watchId;
this.actionIds = actionIds;
}
private void validateIds(String watchId, String... actionIds) {
ValidationException exception = new ValidationException();
if (watchId == null) {
exception.addValidationError("watch id is missing");
} else if (PutWatchRequest.isValidId(watchId) == false) {
exception.addValidationError("watch id contains whitespace");
}
if (actionIds != null) {
for (String actionId : actionIds) {
if (actionId == null) {
exception.addValidationError(String.format(Locale.ROOT, "action id may not be null"));
} else if (PutWatchRequest.isValidId(actionId) == false) {
exception.addValidationError(
String.format(Locale.ROOT, "action id [%s] contains whitespace", actionId));
}
}
}
if (!exception.validationErrors().isEmpty()) {
throw exception;
}
}
/**
* @return The ID of the watch to be acked.
*/
public String getWatchId() {
return watchId;
}
/**
* @return The IDs of the actions to be acked. If omitted,
* all actions for the given watch will be acknowledged.
*/
public String[] getActionIds() {
return actionIds;
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder("ack [").append(watchId).append("]");
if (actionIds.length > 0) {
sb.append("[");
for (int i = 0; i < actionIds.length; i++) {
if (i > 0) {
sb.append(", ");
}
sb.append(actionIds[i]);
}
sb.append("]");
}
return sb.toString();
}
}
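A small sketch of the fail-fast validation above (the ids are hypothetical):

// Throws ValidationException with "watch id contains whitespace".
AckWatchRequest invalid = new AckWatchRequest("my watch id", "logme");

// Valid: omitting action ids means all actions of the watch are acknowledged.
AckWatchRequest ackAll = new AckWatchRequest("my_watch_id");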

View File

@@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.watcher;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
/**
* The response from an 'ack watch' request.
*/
public class AckWatchResponse {
private final WatchStatus status;
public AckWatchResponse(WatchStatus status) {
this.status = status;
}
/**
* @return the status of the requested watch. If an action was
* successfully acknowledged, this will be reflected in its status.
*/
public WatchStatus getStatus() {
return status;
}
private static final ParseField STATUS_FIELD = new ParseField("status");
private static ConstructingObjectParser<AckWatchResponse, Void> PARSER =
new ConstructingObjectParser<>("ack_watch_response", true,
a -> new AckWatchResponse((WatchStatus) a[0]));
static {
PARSER.declareObject(ConstructingObjectParser.constructorArg(),
(parser, context) -> WatchStatus.parse(parser),
STATUS_FIELD);
}
public static AckWatchResponse fromXContent(XContentParser parser) throws IOException {
return PARSER.parse(parser, null);
}
}
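A minimal parsing sketch, mirroring the parser setup used by the tests later in this commit (the JSON body and values are illustrative; the parser only requires a top-level "status" object):

import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

String json = "{ \"status\": { \"version\": 42, \"execution_state\": \"acknowledged\" } }";
try (XContentParser parser = XContentFactory.xContent(XContentType.JSON)
        .createParser(NamedXContentRegistry.EMPTY, null, json)) {
    parser.nextToken();
    AckWatchResponse response = AckWatchResponse.fromXContent(parser);
}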

View File

@@ -18,6 +18,11 @@
  */
 package org.elasticsearch.client;

+import org.elasticsearch.ElasticsearchStatusException;
+import org.elasticsearch.client.watcher.AckWatchRequest;
+import org.elasticsearch.client.watcher.AckWatchResponse;
+import org.elasticsearch.client.watcher.ActionStatus;
+import org.elasticsearch.client.watcher.ActionStatus.AckStatus;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -25,6 +30,7 @@ import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
+import org.elasticsearch.rest.RestStatus;

 import static org.hamcrest.Matchers.is;
@@ -72,4 +78,34 @@ public class WatcherIT extends ESRestHighLevelClientTestCase {
         }
     }
+
+    public void testAckWatch() throws Exception {
+        String watchId = randomAlphaOfLength(10);
+        String actionId = "logme";
+
+        PutWatchResponse putWatchResponse = createWatch(watchId);
+        assertThat(putWatchResponse.isCreated(), is(true));
+
+        AckWatchResponse response = highLevelClient().watcher().ackWatch(
+            new AckWatchRequest(watchId, actionId), RequestOptions.DEFAULT);
+        ActionStatus actionStatus = response.getStatus().actionStatus(actionId);
+        assertEquals(AckStatus.State.AWAITS_SUCCESSFUL_EXECUTION, actionStatus.ackStatus().state());
+
+        // TODO: use the high-level REST client here once it supports 'execute watch'.
+        Request executeWatchRequest = new Request("POST", "_xpack/watcher/watch/" + watchId + "/_execute");
+        executeWatchRequest.setJsonEntity("{ \"record_execution\": true }");
+        Response executeResponse = client().performRequest(executeWatchRequest);
+        assertEquals(RestStatus.OK.getStatus(), executeResponse.getStatusLine().getStatusCode());
+
+        response = highLevelClient().watcher().ackWatch(
+            new AckWatchRequest(watchId, actionId), RequestOptions.DEFAULT);
+        actionStatus = response.getStatus().actionStatus(actionId);
+        assertEquals(AckStatus.State.ACKED, actionStatus.ackStatus().state());
+
+        ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class,
+            () -> highLevelClient().watcher().ackWatch(
+                new AckWatchRequest("nonexistent"), RequestOptions.DEFAULT));
+        assertEquals(RestStatus.NOT_FOUND, exception.status());
+    }
 }

View File

@@ -21,6 +21,7 @@ package org.elasticsearch.client;

 import org.apache.http.client.methods.HttpDelete;
 import org.apache.http.client.methods.HttpPut;
+import org.elasticsearch.client.watcher.AckWatchRequest;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
@@ -30,6 +31,7 @@ import org.elasticsearch.test.ESTestCase;
 import java.io.ByteArrayOutputStream;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.StringJoiner;

 import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.nullValue;
@@ -75,4 +77,24 @@ public class WatcherRequestConvertersTests extends ESTestCase {
         assertEquals("/_xpack/watcher/watch/" + watchId, request.getEndpoint());
         assertThat(request.getEntity(), nullValue());
     }
+
+    public void testAckWatch() {
+        String watchId = randomAlphaOfLength(10);
+        String[] actionIds = generateRandomStringArray(5, 10, false, true);
+
+        AckWatchRequest ackWatchRequest = new AckWatchRequest(watchId, actionIds);
+        Request request = WatcherRequestConverters.ackWatch(ackWatchRequest);
+
+        assertEquals(HttpPut.METHOD_NAME, request.getMethod());
+
+        StringJoiner expectedEndpoint = new StringJoiner("/", "/", "")
+            .add("_xpack").add("watcher").add("watch").add(watchId).add("_ack");
+        if (ackWatchRequest.getActionIds().length > 0) {
+            String actionsParam = String.join(",", ackWatchRequest.getActionIds());
+            expectedEndpoint.add(actionsParam);
+        }
+
+        assertEquals(expectedEndpoint.toString(), request.getEndpoint());
+        assertThat(request.getEntity(), nullValue());
+    }
 }

View File

@@ -21,8 +21,15 @@ package org.elasticsearch.client.documentation;

 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.LatchedActionListener;
 import org.elasticsearch.client.ESRestHighLevelClientTestCase;
+import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.Response;
 import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.watcher.AckWatchRequest;
+import org.elasticsearch.client.watcher.AckWatchResponse;
+import org.elasticsearch.client.watcher.ActionStatus;
+import org.elasticsearch.client.watcher.ActionStatus.AckStatus;
+import org.elasticsearch.client.watcher.WatchStatus;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.XContentType;
@@ -30,6 +37,7 @@ import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.DeleteWatchResponse;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
 import org.elasticsearch.protocol.xpack.watcher.PutWatchResponse;
+import org.elasticsearch.rest.RestStatus;

 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -132,4 +140,67 @@ public class WatcherDocumentationIT extends ESRestHighLevelClientTestCase {
         }
     }
+
+    public void testAckWatch() throws Exception {
+        RestHighLevelClient client = highLevelClient();
+
+        {
+            BytesReference watch = new BytesArray("{ \n" +
+                "  \"trigger\": { \"schedule\": { \"interval\": \"10h\" } },\n" +
+                "  \"input\": { \"simple\": { \"foo\" : \"bar\" } },\n" +
+                "  \"actions\": { \"logme\": { \"logging\": { \"text\": \"{{ctx.payload}}\" } } }\n" +
+                "}");
+            PutWatchRequest putWatchRequest = new PutWatchRequest("my_watch_id", watch, XContentType.JSON);
+            client.watcher().putWatch(putWatchRequest, RequestOptions.DEFAULT);
+
+            // TODO: use the high-level REST client here once it supports 'execute watch'.
+            Request executeWatchRequest = new Request("POST", "_xpack/watcher/watch/my_watch_id/_execute");
+            executeWatchRequest.setJsonEntity("{ \"record_execution\": true }");
+            Response executeResponse = client().performRequest(executeWatchRequest);
+            assertEquals(RestStatus.OK.getStatus(), executeResponse.getStatusLine().getStatusCode());
+        }
+
+        {
+            //tag::ack-watch-execute
+            AckWatchRequest request = new AckWatchRequest("my_watch_id", // <1>
+                "logme", "emailme"); // <2>
+            AckWatchResponse response = client.watcher().ackWatch(request, RequestOptions.DEFAULT);
+            //end::ack-watch-execute
+
+            //tag::ack-watch-response
+            WatchStatus watchStatus = response.getStatus();
+            ActionStatus actionStatus = watchStatus.actionStatus("logme"); // <1>
+            AckStatus.State ackState = actionStatus.ackStatus().state(); // <2>
+            //end::ack-watch-response
+
+            assertEquals(AckStatus.State.ACKED, ackState);
+        }
+
+        {
+            AckWatchRequest request = new AckWatchRequest("my_watch_id");
+
+            // tag::ack-watch-execute-listener
+            ActionListener<AckWatchResponse> listener = new ActionListener<AckWatchResponse>() {
+                @Override
+                public void onResponse(AckWatchResponse response) {
+                    // <1>
+                }

+                @Override
+                public void onFailure(Exception e) {
+                    // <2>
+                }
+            };
+            // end::ack-watch-execute-listener
+
+            // For testing, replace the empty listener by a blocking listener.
+            final CountDownLatch latch = new CountDownLatch(1);
+            listener = new LatchedActionListener<>(listener, latch);
+
+            // tag::ack-watch-execute-async
+            client.watcher().ackWatchAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            // end::ack-watch-execute-async
+
+            assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        }
+    }
 }

View File

@@ -0,0 +1,105 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.watcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParseException;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.XContentTestUtils;
import java.io.IOException;
import java.util.function.Predicate;
/**
* Basic unit tests for {@link AckWatchResponse}.
*
* Note that we only sanity check watch status parsing here, as there
* are dedicated tests for it in {@link WatchStatusTests}.
*/
public class AckWatchResponseTests extends ESTestCase {
public void testBasicParsing() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
.startObject("status")
.field("version", 42)
.field("execution_state", ExecutionState.ACKNOWLEDGED)
.endObject()
.endObject();
BytesReference bytes = BytesReference.bytes(builder);
AckWatchResponse response = parse(builder.contentType(), bytes);
WatchStatus status = response.getStatus();
assertNotNull(status);
assertEquals(42, status.version());
assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
}
public void testParsingWithMissingStatus() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject().endObject();
BytesReference bytes = BytesReference.bytes(builder);
expectThrows(IllegalArgumentException.class, () -> parse(builder.contentType(), bytes));
}
public void testParsingWithNullStatus() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
.nullField("status")
.endObject();
BytesReference bytes = BytesReference.bytes(builder);
expectThrows(XContentParseException.class, () -> parse(builder.contentType(), bytes));
}
public void testParsingWithUnknownKeys() throws IOException {
XContentType contentType = randomFrom(XContentType.values());
XContentBuilder builder = XContentFactory.contentBuilder(contentType).startObject()
.startObject("status")
.field("version", 42)
.field("execution_state", ExecutionState.ACKNOWLEDGED)
.endObject()
.endObject();
BytesReference bytes = BytesReference.bytes(builder);
Predicate<String> excludeFilter = field -> field.equals("status.actions");
BytesReference bytesWithRandomFields = XContentTestUtils.insertRandomFields(
builder.contentType(), bytes, excludeFilter, random());
AckWatchResponse response = parse(builder.contentType(), bytesWithRandomFields);
WatchStatus status = response.getStatus();
assertNotNull(status);
assertEquals(42, status.version());
assertEquals(ExecutionState.ACKNOWLEDGED, status.getExecutionState());
}
private AckWatchResponse parse(XContentType contentType, BytesReference bytes) throws IOException {
XContentParser parser = XContentFactory.xContent(contentType)
.createParser(NamedXContentRegistry.EMPTY, null, bytes.streamInput());
parser.nextToken();
return AckWatchResponse.fromXContent(parser);
}
}

View File

@@ -0,0 +1,98 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.watcher;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.client.ValidationException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.protocol.xpack.watcher.DeleteWatchRequest;
import org.elasticsearch.protocol.xpack.watcher.PutWatchRequest;
import org.elasticsearch.test.ESTestCase;
import java.util.Optional;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.notNullValue;
public class WatchRequestValidationTests extends ESTestCase {
public void testAcknowledgeWatchInvalidWatchId() {
ValidationException e = expectThrows(ValidationException.class,
() -> new AckWatchRequest("id with whitespaces"));
assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
}
public void testAcknowledgeWatchInvalidActionId() {
ValidationException e = expectThrows(ValidationException.class,
() -> new AckWatchRequest("_id", "action id with whitespaces"));
assertThat(e.validationErrors(), hasItem("action id [action id with whitespaces] contains whitespace"));
}
public void testAcknowledgeWatchNullActionArray() {
// need this to prevent some compilation errors, i.e. in 1.8.0_91
String[] nullArray = null;
Optional<ValidationException> e = new AckWatchRequest("_id", nullArray).validate();
assertFalse(e.isPresent());
}
public void testAcknowledgeWatchNullActionId() {
ValidationException e = expectThrows(ValidationException.class,
() -> new AckWatchRequest("_id", new String[] {null}));
assertThat(e.validationErrors(), hasItem("action id may not be null"));
}
public void testDeleteWatchInvalidWatchId() {
ActionRequestValidationException e = new DeleteWatchRequest("id with whitespaces").validate();
assertThat(e, is(notNullValue()));
assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
}
public void testDeleteWatchNullId() {
ActionRequestValidationException e = new DeleteWatchRequest(null).validate();
assertThat(e, is(notNullValue()));
assertThat(e.validationErrors(), hasItem("watch id is missing"));
}
public void testPutWatchInvalidWatchId() {
ActionRequestValidationException e = new PutWatchRequest("id with whitespaces", BytesArray.EMPTY, XContentType.JSON).validate();
assertThat(e, is(notNullValue()));
assertThat(e.validationErrors(), hasItem("watch id contains whitespace"));
}
public void testPutWatchNullId() {
ActionRequestValidationException e = new PutWatchRequest(null, BytesArray.EMPTY, XContentType.JSON).validate();
assertThat(e, is(notNullValue()));
assertThat(e.validationErrors(), hasItem("watch id is missing"));
}
public void testPutWatchSourceNull() {
ActionRequestValidationException e = new PutWatchRequest("foo", null, XContentType.JSON).validate();
assertThat(e, is(notNullValue()));
assertThat(e.validationErrors(), hasItem("watch source is missing"));
}
public void testPutWatchContentNull() {
ActionRequestValidationException e = new PutWatchRequest("foo", BytesArray.EMPTY, null).validate();
assertThat(e, is(notNullValue()));
assertThat(e.validationErrors(), hasItem("request body is missing"));
}
}

View File

@@ -157,8 +157,10 @@ subprojects {
         environment('JAVA_HOME', getJavaHome(it, 8))
       } else if ("6.2".equals(bwcBranch)) {
         environment('JAVA_HOME', getJavaHome(it, 9))
-      } else if (["6.3", "6.4", "6.x"].contains(bwcBranch)) {
+      } else if (["6.3", "6.4"].contains(bwcBranch)) {
         environment('JAVA_HOME', getJavaHome(it, 10))
+      } else if (["6.x"].contains(bwcBranch)) {
+        environment('JAVA_HOME', getJavaHome(it, 11))
       } else {
         environment('JAVA_HOME', project.compilerJavaHome)
       }

View File

@@ -310,9 +310,11 @@ The Java High Level REST Client supports the following Watcher APIs:

 * <<java-rest-high-x-pack-watcher-put-watch>>
 * <<java-rest-high-x-pack-watcher-delete-watch>>
+* <<java-rest-high-watcher-ack-watch>>

 include::watcher/put-watch.asciidoc[]
 include::watcher/delete-watch.asciidoc[]
+include::watcher/ack-watch.asciidoc[]

 == Graph APIs

View File

@@ -0,0 +1,57 @@
[[java-rest-high-watcher-ack-watch]]
=== Ack Watch API
[[java-rest-high-watcher-ack-watch-execution]]
==== Execution
{xpack-ref}/actions.html#actions-ack-throttle[Acknowledging a watch] enables you
to manually throttle execution of a watch's actions. A watch can be acknowledged
through the following request:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute]
--------------------------------------------------
<1> The ID of the watch to ack.
<2> An optional list of IDs representing the watch actions that should be acked.
If no action IDs are provided, then all of the watch's actions will be acked.
[[java-rest-high-watcher-ack-watch-response]]
==== Response
The returned `AckWatchResponse` contains the new status of the requested watch:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-response]
--------------------------------------------------
<1> The status of a specific action that was acked.
<2> The acknowledgement state of the action. If the action was successfully
acked, this state will be equal to `AckStatus.State.ACKED`.
[[java-rest-high-watcher-ack-watch-async]]
==== Asynchronous Execution
This request can be executed asynchronously:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-async]
--------------------------------------------------
<1> The `AckWatchRequest` to execute and the `ActionListener` to use when
the execution completes.
The asynchronous method does not block and returns immediately. Once the request
completes, the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.
A listener for `AckWatchResponse` can be constructed as follows:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/WatcherDocumentationIT.java[ack-watch-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument.
<2> Called in case of failure. The raised exception is provided as an argument.

View File

@@ -26,12 +26,17 @@ The following gce settings (prefixed with `cloud.gce`) are supported:

 `project_id`::

-    Your Google project id (mandatory).
+    Your Google project id.
+    By default the project id will be derived from the instance metadata.
+
+    Note: Deriving the project id from system properties or environment variables
+    (`GOOGLE_CLOUD_PROJECT` or `GCLOUD_PROJECT`) is not supported.

 `zone`::

-    helps to retrieve instances running in a given zone (mandatory). It should be one of the
-    https://developers.google.com/compute/docs/zones#available[GCE supported zones].
+    helps to retrieve instances running in a given zone.
+    It should be one of the https://developers.google.com/compute/docs/zones#available[GCE supported zones].
+    By default the zone will be derived from the instance metadata.

     See also <<discovery-gce-usage-zones>>.

 `retry`::
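
For illustration, a hedged sketch of setting these values programmatically, in the style of the plugin's tests elsewhere in this commit (the project id and zones are hypothetical):

import org.elasticsearch.common.settings.Settings;

Settings nodeSettings = Settings.builder()
    // Explicit project id; if omitted, it is now derived from the instance metadata.
    .put("cloud.gce.project_id", "es-cloud")
    // Explicit zones; if omitted, the default zone is now derived from the instance metadata.
    .putList("cloud.gce.zone", "us-central1-a", "us-central1-b")
    .build();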

View File

@@ -26,3 +26,9 @@ has been removed. `missing_bucket` should be used instead.
 The object used to share aggregation state between the scripts in a Scripted Metric
 Aggregation is now a variable called `state` available in the script context, rather than
 being provided via the `params` object as `params._agg`.
+
+[float]
+==== Make metric aggregation script parameters `reduce_script` and `combine_script` mandatory
+
+The metric aggregation has been changed to require these two script parameters to ensure users are
+explicitly defining how their data is processed.
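A hedged sketch of a conforming request (the index name and scripts are illustrative), using the low-level client style seen elsewhere in this commit:

Request searchRequest = new Request("POST", "/my_index/_search");
searchRequest.setJsonEntity("{\n" +
    "  \"size\": 0,\n" +
    "  \"aggs\": {\n" +
    "    \"total_length\": {\n" +
    "      \"scripted_metric\": {\n" +
    "        \"init_script\": \"state.lengths = []\",\n" +
    "        \"map_script\": \"state.lengths.add(doc['message.keyword'].size())\",\n" +
    "        \"combine_script\": \"long total = 0; for (l in state.lengths) { total += l } return total\",\n" +
    "        \"reduce_script\": \"long total = 0; for (s in states) { total += s } return total\"\n" +
    "      }\n" +
    "    }\n" +
    "  }\n" +
    "}");
Response searchResponse = client().performRequest(searchRequest);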

View File

@@ -14,6 +14,15 @@ now been removed. Instead, use `.value` on `date` fields, or explicitly
 parse `long` fields into a date object using
 `Instant.ofEpochMilli(doc["myfield"].value)`.
+
+[float]
+==== Accessing missing document values will throw an error
+
+`doc['field'].value` will throw an exception if
+the document is missing a value for the field `field`.
+
+To check if a document is missing a value, you can use
+`doc['field'].size() == 0`.
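A hedged illustration of the recommended guard (the field name and fallback value are hypothetical):

import org.elasticsearch.script.Script;

// Painless source that checks for a missing value before dereferencing it.
Script script = new Script("doc['price'].size() == 0 ? 0 : doc['price'].value");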
 [float]
 ==== Script errors will return as `400` error codes

View File

@@ -26,6 +26,11 @@ dependencyLicenses {
   mapping from: /google-.*/, to: 'google'
 }

+check {
+  // also execute the QA tests when testing the plugin
+  dependsOn 'qa:gce:check'
+}
+
 test {
   // this is needed for insecure plugins, remove if possible!
   systemProperty 'tests.artifact', project.name

View File

@@ -0,0 +1 @@
group = "${group}.plugins.discovery-gce.qa"

View File

@@ -0,0 +1,80 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.test.AntFixture
apply plugin: 'elasticsearch.standalone-rest-test'
apply plugin: 'elasticsearch.rest-test'
final int gceNumberOfNodes = 3
File gceDiscoveryFile = new File(project.buildDir, 'generated-resources/nodes.uri')
dependencies {
testCompile project(path: ':plugins:discovery-gce', configuration: 'runtime')
}
/** A task to start the GCEFixture which emulates a GCE service **/
task gceFixture(type: AntFixture) {
dependsOn compileTestJava
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
executable = new File(project.runtimeJavaHome, 'bin/java')
args 'org.elasticsearch.cloud.gce.GCEFixture', baseDir, gceDiscoveryFile.getAbsolutePath()
}
Map<String, Object> expansions = [
'expected_nodes': gceNumberOfNodes
]
processTestResources {
inputs.properties(expansions)
MavenFilteringHack.filter(it, expansions)
}
integTestCluster {
dependsOn gceFixture
numNodes = gceNumberOfNodes
plugin ':plugins:discovery-gce'
setting 'discovery.zen.hosts_provider', 'gce'
// use gce fixture for Auth calls instead of http://metadata.google.internal
integTestCluster.environment 'GCE_METADATA_HOST', "http://${-> gceFixture.addressAndPort}"
// allows to configure hidden settings (`cloud.gce.host` and `cloud.gce.root_url`)
systemProperty 'es.allow_reroute_gce_settings', 'true'
// use gce fixture for metadata server calls instead of http://metadata.google.internal
setting 'cloud.gce.host', "http://${-> gceFixture.addressAndPort}"
// use gce fixture for API calls instead of https://www.googleapis.com
setting 'cloud.gce.root_url', "http://${-> gceFixture.addressAndPort}"
unicastTransportUri = { seedNode, node, ant -> return null }
waitCondition = { node, ant ->
gceDiscoveryFile.parentFile.mkdirs()
gceDiscoveryFile.setText(integTest.nodes.collect { n -> "${n.transportUri()}" }.join('\n'), 'UTF-8')
File tmpFile = new File(node.cwd, 'wait.success')
ant.get(src: "http://${node.httpUri()}/",
dest: tmpFile.toString(),
ignoreerrors: true,
retries: 10)
return tmpFile.exists()
}
}

View File

@@ -0,0 +1,37 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cloud.gce;
import com.carrotsearch.randomizedtesting.annotations.Name;
import com.carrotsearch.randomizedtesting.annotations.ParametersFactory;
import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate;
import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase;
public class GCEDiscoveryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase {
public GCEDiscoveryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) {
super(testCandidate);
}
@ParametersFactory
public static Iterable<Object[]> parameters() throws Exception {
return ESClientYamlSuiteTestCase.createParameters();
}
}

View File

@@ -0,0 +1,214 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cloud.gce;
import org.apache.http.client.methods.HttpGet;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.path.PathTrie;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.rest.RestUtils;
import org.elasticsearch.test.fixture.AbstractHttpFixture;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
import java.util.function.Function;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
/**
* {@link GCEFixture} is a fixture that emulates a GCE service.
*/
public class GCEFixture extends AbstractHttpFixture {
public static final String PROJECT_ID = "discovery-gce-test";
public static final String ZONE = "test-zone";
public static final String TOKEN = "1/fFAGRNJru1FTz70BzhT3Zg";
public static final String TOKEN_TYPE = "Bearer";
private final PathTrie<RequestHandler> handlers;
private final Path nodes;
private GCEFixture(final String workingDir, final String nodesUriPath) {
super(workingDir);
this.nodes = toPath(Objects.requireNonNull(nodesUriPath));
this.handlers = defaultHandlers();
}
public static void main(String[] args) throws Exception {
if (args == null || args.length != 2) {
throw new IllegalArgumentException("GCEFixture <working directory> <nodes transport uri file>");
}
final GCEFixture fixture = new GCEFixture(args[0], args[1]);
fixture.listen();
}
private static String nonAuthPath(Request request) {
return nonAuthPath(request.getMethod(), request.getPath());
}
private static String nonAuthPath(String method, String path) {
return "NONAUTH " + method + " " + path;
}
private static String authPath(Request request) {
return authPath(request.getMethod(), request.getPath());
}
private static String authPath(String method, String path) {
return "AUTH " + method + " " + path;
}
/** Builds the default request handlers **/
private PathTrie<RequestHandler> defaultHandlers() {
final PathTrie<RequestHandler> handlers = new PathTrie<>(RestUtils.REST_DECODER);
final Consumer<Map<String, String>> commonHeaderConsumer = headers -> headers.put("Metadata-Flavor", "Google");
final Function<String, Response> simpleValue = value -> {
final Map<String, String> headers = new HashMap<>(TEXT_PLAIN_CONTENT_TYPE);
commonHeaderConsumer.accept(headers);
final byte[] responseAsBytes = value.getBytes(StandardCharsets.UTF_8);
return new Response(RestStatus.OK.getStatus(), headers, responseAsBytes);
};
final Function<String, Response> jsonValue = value -> {
final Map<String, String> headers = new HashMap<>(JSON_CONTENT_TYPE);
commonHeaderConsumer.accept(headers);
final byte[] responseAsBytes = value.getBytes(StandardCharsets.UTF_8);
return new Response(RestStatus.OK.getStatus(), headers, responseAsBytes);
};
// https://cloud.google.com/compute/docs/storing-retrieving-metadata
handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/project/project-id"),
request -> simpleValue.apply(PROJECT_ID));
handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/project/attributes/google-compute-default-zone"),
request -> simpleValue.apply(ZONE));
// https://cloud.google.com/compute/docs/access/create-enable-service-accounts-for-instances
handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/computeMetadata/v1/instance/service-accounts/default/token"),
request -> jsonValue.apply(Strings.toString(jsonBuilder()
.startObject()
.field("access_token", TOKEN)
.field("expires_in", TimeUnit.HOURS.toSeconds(1))
.field("token_type", TOKEN_TYPE)
.endObject())));
// https://cloud.google.com/compute/docs/reference/rest/v1/instances
handlers.insert(authPath(HttpGet.METHOD_NAME, "/compute/v1/projects/{project}/zones/{zone}/instances"),
request -> {
final List items = new ArrayList();
int count = 0;
for (String address : Files.readAllLines(nodes)) {
count++;
items.add(MapBuilder.<String, Object>newMapBuilder()
.put("id", Long.toString(9309873766405L + count))
.put("description", "ES node" + count)
.put("name", "test" + count)
.put("kind", "compute#instance")
.put("machineType", "n1-standard-1")
.put("networkInterfaces",
Collections.singletonList(MapBuilder.<String, Object>newMapBuilder()
.put("accessConfigs", Collections.emptyList())
.put("name", "nic0")
.put("network", "default")
.put("networkIP", address)
.immutableMap()))
.put("status", "RUNNING")
.put("zone", ZONE)
.immutableMap());
}
final String json = Strings.toString(jsonBuilder()
.startObject()
.field("id", "test-instances")
.field("items", items)
.endObject());
final byte[] responseAsBytes = json.getBytes(StandardCharsets.UTF_8);
final Map<String, String> headers = new HashMap<>(JSON_CONTENT_TYPE);
commonHeaderConsumer.accept(headers);
return new Response(RestStatus.OK.getStatus(), headers, responseAsBytes);
});
return handlers;
}
@Override
protected Response handle(final Request request) throws IOException {
final String nonAuthorizedPath = nonAuthPath(request);
final RequestHandler nonAuthorizedHandler = handlers.retrieve(nonAuthorizedPath, request.getParameters());
if (nonAuthorizedHandler != null) {
return nonAuthorizedHandler.handle(request);
}
final String authorizedPath = authPath(request);
final RequestHandler authorizedHandler = handlers.retrieve(authorizedPath, request.getParameters());
if (authorizedHandler != null) {
final String authorization = request.getHeader("Authorization");
if ((TOKEN_TYPE + " " + TOKEN).equals(authorization) == false) {
return newError(RestStatus.UNAUTHORIZED, "Authorization", "Login Required");
}
return authorizedHandler.handle(request);
}
return null;
}
private static Response newError(final RestStatus status, final String code, final String message) throws IOException {
final String response = Strings.toString(jsonBuilder()
.startObject()
.field("error", MapBuilder.<String, Object>newMapBuilder()
.put("errors", Collections.singletonList(
MapBuilder.<String, Object>newMapBuilder()
.put("domain", "global")
.put("reason", "required")
.put("message", message)
.put("locationType", "header")
.put("location", code)
.immutableMap()
))
.put("code", status.getStatus())
.put("message", message)
.immutableMap())
.endObject());
return new Response(status.getStatus(), JSON_CONTENT_TYPE, response.getBytes(UTF_8));
}
@SuppressForbidden(reason = "Paths#get is fine - we don't have environment here")
private static Path toPath(final String dir) {
return Paths.get(dir);
}
}

View File

@@ -0,0 +1,15 @@
# Integration tests for discovery-gce
setup:
  - do:
      cluster.health:
        wait_for_status: green
        wait_for_nodes: ${expected_nodes}

---
"All nodes are correctly discovered":

  - do:
      nodes.info:
        metric: [ transport ]

  - match: { _nodes.total: ${expected_nodes} }

View File

@@ -75,4 +75,8 @@ public interface GceInstancesService extends Closeable {
      * @return a collection of running instances within the same GCE project
      */
     Collection<Instance> instances();
+
+    String projectId();
+
+    List<String> zones();
 }

View File

@@ -29,6 +29,11 @@ import java.util.function.Function;

 import com.google.api.client.googleapis.compute.ComputeCredential;
 import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
+import com.google.api.client.http.GenericUrl;
+import com.google.api.client.http.HttpHeaders;
+import com.google.api.client.http.HttpRequest;
+import com.google.api.client.http.HttpRequestFactory;
+import com.google.api.client.http.HttpResponse;
 import com.google.api.client.http.HttpTransport;
 import com.google.api.client.http.javanet.NetHttpTransport;
 import com.google.api.client.json.JsonFactory;
@@ -103,9 +108,58 @@ public class GceInstancesServiceImpl extends AbstractComponent implements GceIns
     public GceInstancesServiceImpl(Settings settings) {
         super(settings);
-        this.project = PROJECT_SETTING.get(settings);
-        this.zones = ZONE_SETTING.get(settings);
         this.validateCerts = GCE_VALIDATE_CERTIFICATES.get(settings);
+        this.project = resolveProject();
+        this.zones = resolveZones();
+    }
+
+    private String resolveProject() {
+        if (PROJECT_SETTING.exists(settings)) {
+            return PROJECT_SETTING.get(settings);
+        }
+        try {
+            // this code is based on a private GCE method: {@link com.google.cloud.ServiceOptions#getAppEngineProjectIdFromMetadataServer()}
+            return getAppEngineValueFromMetadataServer("/computeMetadata/v1/project/project-id");
+        } catch (Exception e) {
+            logger.warn("unable to resolve project from metadata server for GCE discovery service", e);
+        }
+        return null;
+    }
+
+    private List<String> resolveZones() {
+        if (ZONE_SETTING.exists(settings)) {
+            return ZONE_SETTING.get(settings);
+        }
+        try {
+            final String defaultZone =
+                getAppEngineValueFromMetadataServer("/computeMetadata/v1/project/attributes/google-compute-default-zone");
+            return Collections.singletonList(defaultZone);
+        } catch (Exception e) {
+            logger.warn("unable to resolve default zone from metadata server for GCE discovery service", e);
+        }
+        return null;
+    }
+
+    String getAppEngineValueFromMetadataServer(String serviceURL) throws GeneralSecurityException, IOException {
+        String metadata = GceMetadataService.GCE_HOST.get(settings);
+        GenericUrl url = Access.doPrivileged(() -> new GenericUrl(metadata + serviceURL));
+
+        HttpTransport httpTransport = getGceHttpTransport();
+        HttpRequestFactory requestFactory = httpTransport.createRequestFactory();
+        HttpRequest request = requestFactory.buildGetRequest(url)
+            .setConnectTimeout(500)
+            .setReadTimeout(500)
+            .setHeaders(new HttpHeaders().set("Metadata-Flavor", "Google"));
+        HttpResponse response = Access.doPrivilegedIOException(() -> request.execute());
+        return headerContainsMetadataFlavor(response) ? response.parseAsString() : null;
+    }
+
+    private static boolean headerContainsMetadataFlavor(HttpResponse response) {
+        // com.google.cloud.ServiceOptions#headerContainsMetadataFlavor(HttpResponse)
+        String metadataFlavorValue = response.getHeaders().getFirstHeaderStringValue("Metadata-Flavor");
+        return "Google".equals(metadataFlavorValue);
     }

     protected synchronized HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException {
@@ -180,6 +234,16 @@ public class GceInstancesServiceImpl extends AbstractComponent implements GceIns
         return this.client;
     }

+    @Override
+    public String projectId() {
+        return project;
+    }
+
+    @Override
+    public List<String> zones() {
+        return zones;
+    }
+
     @Override
     public void close() throws IOException {
         if (gceHttpTransport != null) {

View File

@@ -79,8 +79,8 @@ public class GceUnicastHostsProvider extends AbstractComponent implements Unicas
         this.networkService = networkService;

         this.refreshInterval = GceInstancesService.REFRESH_SETTING.get(settings);
-        this.project = GceInstancesService.PROJECT_SETTING.get(settings);
-        this.zones = GceInstancesService.ZONE_SETTING.get(settings);
+        this.project = gceInstancesService.projectId();
+        this.zones = gceInstancesService.zones();
         this.tags = TAGS_SETTING.get(settings);

         if (logger.isDebugEnabled()) {

View File

@@ -22,6 +22,7 @@ package org.elasticsearch.plugin.discovery.gce;

 import com.google.api.client.http.HttpHeaders;
 import com.google.api.client.util.ClassInfo;
 import org.apache.logging.log4j.Logger;
+import org.elasticsearch.common.Booleans;
 import org.elasticsearch.core.internal.io.IOUtils;
 import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.cloud.gce.GceInstancesService;
@@ -41,6 +42,7 @@ import org.elasticsearch.transport.TransportService;

 import java.io.Closeable;
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
@@ -49,8 +51,12 @@ import java.util.function.Supplier;

 public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Closeable {

+    /** Determines whether settings that reroute GCE calls should be allowed (for testing purposes only). */
+    private static final boolean ALLOW_REROUTE_GCE_SETTINGS =
+        Booleans.parseBoolean(System.getProperty("es.allow_reroute_gce_settings", "false"));
+
     public static final String GCE = "gce";
-    private final Settings settings;
+    protected final Settings settings;
     private static final Logger logger = Loggers.getLogger(GceDiscoveryPlugin.class);
     // stashed when created in order to properly close
     private final SetOnce<GceInstancesService> gceInstancesService = new SetOnce<>();
@@ -94,14 +100,22 @@ public class GceDiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
     @Override
     public List<Setting<?>> getSettings() {
-        return Arrays.asList(
-            // Register GCE settings
-            GceInstancesService.PROJECT_SETTING,
-            GceInstancesService.ZONE_SETTING,
-            GceUnicastHostsProvider.TAGS_SETTING,
-            GceInstancesService.REFRESH_SETTING,
-            GceInstancesService.RETRY_SETTING,
-            GceInstancesService.MAX_WAIT_SETTING);
+        List<Setting<?>> settings = new ArrayList<>(
+            Arrays.asList(
+                // Register GCE settings
+                GceInstancesService.PROJECT_SETTING,
+                GceInstancesService.ZONE_SETTING,
+                GceUnicastHostsProvider.TAGS_SETTING,
+                GceInstancesService.REFRESH_SETTING,
+                GceInstancesService.RETRY_SETTING,
+                GceInstancesService.MAX_WAIT_SETTING)
+        );
+
+        if (ALLOW_REROUTE_GCE_SETTINGS) {
+            settings.add(GceMetadataService.GCE_HOST);
+            settings.add(GceInstancesServiceImpl.GCE_ROOT_URL);
+        }
+        return Collections.unmodifiableList(settings);
     }

View File

@@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.cloud.gce;
import com.google.api.client.http.HttpTransport;
import com.google.api.client.http.LowLevelHttpRequest;
import com.google.api.client.http.LowLevelHttpResponse;
import com.google.api.client.json.Json;
import com.google.api.client.testing.http.MockHttpTransport;
import com.google.api.client.testing.http.MockLowLevelHttpRequest;
import com.google.api.client.testing.http.MockLowLevelHttpResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import java.util.concurrent.atomic.AtomicBoolean;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.core.Is.is;
public class GceInstancesServiceImplTests extends ESTestCase {
public void testHeaderContainsMetadataFlavor() throws Exception {
final AtomicBoolean addMetadataFlavor = new AtomicBoolean();
final MockHttpTransport transport = new MockHttpTransport() {
@Override
public LowLevelHttpRequest buildRequest(String method, final String url) {
return new MockLowLevelHttpRequest() {
@Override
public LowLevelHttpResponse execute() {
MockLowLevelHttpResponse response = new MockLowLevelHttpResponse();
response.setStatusCode(200);
response.setContentType(Json.MEDIA_TYPE);
response.setContent("value");
if (addMetadataFlavor.get()) {
response.addHeader("Metadata-Flavor", "Google");
}
return response;
}
};
}
};
final GceInstancesServiceImpl service = new GceInstancesServiceImpl(Settings.EMPTY) {
@Override
protected synchronized HttpTransport getGceHttpTransport() {
return transport;
}
};
final String serviceURL = "/computeMetadata/v1/project/project-id";
assertThat(service.getAppEngineValueFromMetadataServer(serviceURL), is(nullValue()));
addMetadataFlavor.set(true);
assertThat(service.getAppEngineValueFromMetadataServer(serviceURL), is("value"));
}
}

View File

@@ -170,6 +170,16 @@ public class GceDiscoverTests extends ESIntegTestCase {
             });
         }

+        @Override
+        public String projectId() {
+            return PROJECT_SETTING.get(settings);
+        }
+
+        @Override
+        public List<String> zones() {
+            return ZONE_SETTING.get(settings);
+        }
+
         @Override
         public void close() throws IOException {
         }

View File

@ -21,6 +21,7 @@ package org.elasticsearch.discovery.gce;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cloud.gce.GceInstancesServiceImpl; import org.elasticsearch.cloud.gce.GceInstancesServiceImpl;
import org.elasticsearch.cloud.gce.GceMetadataService;
import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.network.NetworkService;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.transport.TransportAddress;
@ -40,6 +41,7 @@ import java.util.Locale;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.not;
/**
 * This test class uses a GCE HTTP mock system which allows simulating JSON responses.
@ -211,7 +213,10 @@ public class GceDiscoveryTests extends ESTestCase {
} }
public void testIllegalSettingsMissingAllRequired() { public void testIllegalSettingsMissingAllRequired() {
Settings nodeSettings = Settings.builder()
    // to prevent being resolved using default GCE host
    .put(GceMetadataService.GCE_HOST.getKey(), "http://internal")
    .build();
mock = new GceInstancesServiceMock(nodeSettings); mock = new GceInstancesServiceMock(nodeSettings);
try { try {
buildDynamicNodes(mock, nodeSettings); buildDynamicNodes(mock, nodeSettings);
@ -223,6 +228,8 @@ public class GceDiscoveryTests extends ESTestCase {
public void testIllegalSettingsMissingProject() { public void testIllegalSettingsMissingProject() {
Settings nodeSettings = Settings.builder() Settings nodeSettings = Settings.builder()
// to prevent being resolved using default GCE host
.put(GceMetadataService.GCE_HOST.getKey(), "http://internal")
.putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b") .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
.build(); .build();
mock = new GceInstancesServiceMock(nodeSettings); mock = new GceInstancesServiceMock(nodeSettings);
@ -236,6 +243,8 @@ public class GceDiscoveryTests extends ESTestCase {
public void testIllegalSettingsMissingZone() { public void testIllegalSettingsMissingZone() {
Settings nodeSettings = Settings.builder() Settings nodeSettings = Settings.builder()
// to prevent being resolved using default GCE host
.put(GceMetadataService.GCE_HOST.getKey(), "http://internal")
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName) .put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.build(); .build();
mock = new GceInstancesServiceMock(nodeSettings); mock = new GceInstancesServiceMock(nodeSettings);
@ -261,4 +270,13 @@ public class GceDiscoveryTests extends ESTestCase {
List<TransportAddress> dynamicHosts = buildDynamicNodes(mock, nodeSettings); List<TransportAddress> dynamicHosts = buildDynamicNodes(mock, nodeSettings);
assertThat(dynamicHosts, hasSize(1)); assertThat(dynamicHosts, hasSize(1));
} }
public void testMetadataServerValues() {
Settings nodeSettings = Settings.EMPTY;
mock = new GceInstancesServiceMock(nodeSettings);
assertThat(mock.projectId(), not(projectName));
List<TransportAddress> dynamicHosts = buildDynamicNodes(mock, nodeSettings);
assertThat(dynamicHosts, hasSize(1));
}
} }

View File

@ -32,11 +32,13 @@ public class GceInstancesServiceMock extends GceInstancesServiceImpl {
public GceInstancesServiceMock(Settings settings) { public GceInstancesServiceMock(Settings settings) {
super(settings); super(settings);
this.mockHttpTransport = GceMockUtils.configureMock();
} }
@Override @Override
protected HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException { protected HttpTransport getGceHttpTransport() throws GeneralSecurityException, IOException {
if (this.mockHttpTransport == null) {
this.mockHttpTransport = GceMockUtils.configureMock();
}
return this.mockHttpTransport; return this.mockHttpTransport;
} }
} }

View File

@ -39,7 +39,7 @@ import java.net.URL;
public class GceMockUtils { public class GceMockUtils {
protected static final Logger logger = Loggers.getLogger(GceMockUtils.class); protected static final Logger logger = Loggers.getLogger(GceMockUtils.class);
public static final String GCE_METADATA_URL = "http://metadata.google.internal/computeMetadata/v1/";
protected static HttpTransport configureMock() { protected static HttpTransport configureMock() {
return new MockHttpTransport() { return new MockHttpTransport() {
@ -54,6 +54,7 @@ public class GceMockUtils {
if (url.startsWith(GCE_METADATA_URL)) { if (url.startsWith(GCE_METADATA_URL)) {
logger.info("--> Simulate GCE Auth/Metadata response for [{}]", url); logger.info("--> Simulate GCE Auth/Metadata response for [{}]", url);
response.setContent(readGoogleInternalJsonResponse(url)); response.setContent(readGoogleInternalJsonResponse(url));
response.addHeader("Metadata-Flavor", "Google");
} else { } else {
logger.info("--> Simulate GCE API response for [{}]", url); logger.info("--> Simulate GCE API response for [{}]", url);
response.setContent(readGoogleApiJsonResponse(url)); response.setContent(readGoogleApiJsonResponse(url));

View File

@ -0,0 +1,36 @@
{
"id": "dummy",
"items":[
{
"description": "ES Node 1",
"id": "9309873766428965105",
"kind": "compute#instance",
"machineType": "n1-standard-1",
"name": "test1",
"networkInterfaces": [
{
"accessConfigs": [
{
"kind": "compute#accessConfig",
"name": "External NAT",
"natIP": "104.155.13.147",
"type": "ONE_TO_ONE_NAT"
}
],
"name": "nic0",
"network": "default",
"networkIP": "10.240.79.59"
}
],
"status": "RUNNING",
"tags": {
"fingerprint": "xA6QJb-rGtg=",
"items": [
"elasticsearch",
"dev"
]
},
"zone": "europe-west1-b"
}
]
}

View File

@ -0,0 +1,36 @@
{
"id": "dummy",
"items":[
{
"description": "ES Node 2",
"id": "9309873766428965105",
"kind": "compute#instance",
"machineType": "n1-standard-1",
"name": "test2",
"networkInterfaces": [
{
"accessConfigs": [
{
"kind": "compute#accessConfig",
"name": "External NAT",
"natIP": "104.155.13.147",
"type": "ONE_TO_ONE_NAT"
}
],
"name": "nic0",
"network": "default",
"networkIP": "10.240.79.59"
}
],
"status": "RUNNING",
"tags": {
"fingerprint": "xA6QJb-rGtg=",
"items": [
"elasticsearch",
"dev"
]
},
"zone": "us-central1-a"
}
]
}

View File

@ -186,6 +186,11 @@ public class NumberFieldMapper extends FieldMapper {
return result; return result;
} }
@Override
public Number parsePoint(byte[] value) {
return HalfFloatPoint.decodeDimension(value, 0);
}
@Override @Override
public Float parse(XContentParser parser, boolean coerce) throws IOException { public Float parse(XContentParser parser, boolean coerce) throws IOException {
float parsed = parser.floatValue(coerce); float parsed = parser.floatValue(coerce);
@ -278,6 +283,11 @@ public class NumberFieldMapper extends FieldMapper {
return result; return result;
} }
@Override
public Number parsePoint(byte[] value) {
return FloatPoint.decodeDimension(value, 0);
}
@Override @Override
public Float parse(XContentParser parser, boolean coerce) throws IOException { public Float parse(XContentParser parser, boolean coerce) throws IOException {
float parsed = parser.floatValue(coerce); float parsed = parser.floatValue(coerce);
@ -359,6 +369,11 @@ public class NumberFieldMapper extends FieldMapper {
return parsed; return parsed;
} }
@Override
public Number parsePoint(byte[] value) {
return DoublePoint.decodeDimension(value, 0);
}
@Override @Override
public Double parse(XContentParser parser, boolean coerce) throws IOException { public Double parse(XContentParser parser, boolean coerce) throws IOException {
double parsed = parser.doubleValue(coerce); double parsed = parser.doubleValue(coerce);
@ -451,6 +466,11 @@ public class NumberFieldMapper extends FieldMapper {
return (byte) doubleValue; return (byte) doubleValue;
} }
@Override
public Number parsePoint(byte[] value) {
return INTEGER.parsePoint(value).byteValue();
}
@Override @Override
public Short parse(XContentParser parser, boolean coerce) throws IOException { public Short parse(XContentParser parser, boolean coerce) throws IOException {
int value = parser.intValue(coerce); int value = parser.intValue(coerce);
@ -507,6 +527,11 @@ public class NumberFieldMapper extends FieldMapper {
return (short) doubleValue; return (short) doubleValue;
} }
@Override
public Number parsePoint(byte[] value) {
return INTEGER.parsePoint(value).shortValue();
}
@Override @Override
public Short parse(XContentParser parser, boolean coerce) throws IOException { public Short parse(XContentParser parser, boolean coerce) throws IOException {
return parser.shortValue(coerce); return parser.shortValue(coerce);
@ -559,6 +584,11 @@ public class NumberFieldMapper extends FieldMapper {
return (int) doubleValue; return (int) doubleValue;
} }
@Override
public Number parsePoint(byte[] value) {
return IntPoint.decodeDimension(value, 0);
}
@Override @Override
public Integer parse(XContentParser parser, boolean coerce) throws IOException { public Integer parse(XContentParser parser, boolean coerce) throws IOException {
return parser.intValue(coerce); return parser.intValue(coerce);
@ -673,6 +703,11 @@ public class NumberFieldMapper extends FieldMapper {
return Numbers.toLong(stringValue, coerce); return Numbers.toLong(stringValue, coerce);
} }
@Override
public Number parsePoint(byte[] value) {
return LongPoint.decodeDimension(value, 0);
}
@Override @Override
public Long parse(XContentParser parser, boolean coerce) throws IOException { public Long parse(XContentParser parser, boolean coerce) throws IOException {
return parser.longValue(coerce); return parser.longValue(coerce);
@ -789,6 +824,7 @@ public class NumberFieldMapper extends FieldMapper {
boolean hasDocValues); boolean hasDocValues);
public abstract Number parse(XContentParser parser, boolean coerce) throws IOException; public abstract Number parse(XContentParser parser, boolean coerce) throws IOException;
public abstract Number parse(Object value, boolean coerce); public abstract Number parse(Object value, boolean coerce);
public abstract Number parsePoint(byte[] value);
public abstract List<Field> createFields(String name, Number value, boolean indexed, public abstract List<Field> createFields(String name, Number value, boolean indexed,
boolean docValued, boolean stored); boolean docValued, boolean stored);
Number valueForSearch(Number value) { Number valueForSearch(Number value) {
@ -937,6 +973,10 @@ public class NumberFieldMapper extends FieldMapper {
} }
} }
public Number parsePoint(byte[] value) {
return type.parsePoint(value);
}
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (super.equals(o) == false) { if (super.equals(o) == false) {

View File

@ -24,6 +24,7 @@ import com.carrotsearch.hppc.ObjectObjectMap;
import org.apache.lucene.document.Field; import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexSettings;
@ -32,6 +33,7 @@ import java.util.Collection;
import java.util.Collections; import java.util.Collections;
import java.util.HashSet; import java.util.HashSet;
import java.util.Iterator; import java.util.Iterator;
import java.util.LinkedList;
import java.util.List; import java.util.List;
import java.util.Set; import java.util.Set;
@ -454,13 +456,40 @@ public abstract class ParseContext implements Iterable<ParseContext.Document>{
} }
void postParse() {
    if (documents.size() > 1) {
        docsReversed = true;
        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_5_0)) {
            /**
             * For indices created on or after {@link Version#V_6_5_0} we preserve the order
             * of the children while ensuring that parents appear after them.
             */
            List<Document> newDocs = reorderParent(documents);
            documents.clear();
            documents.addAll(newDocs);
        } else {
            // reverse the order of docs for nested docs support, parent should be last
            Collections.reverse(documents);
        }
    }
}
} }
/**
* Returns a copy of the provided {@link List} where parent documents appear
* after their children.
*/
private List<Document> reorderParent(List<Document> docs) {
List<Document> newDocs = new ArrayList<>(docs.size());
LinkedList<Document> parents = new LinkedList<>();
for (Document doc : docs) {
while (parents.peek() != doc.getParent()) {
newDocs.add(parents.poll());
}
parents.add(0, doc);
}
newDocs.addAll(parents);
return newDocs;
}
@Override @Override
public Iterator<Document> iterator() { public Iterator<Document> iterator() {
return documents.iterator(); return documents.iterator();

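To make the effect of reorderParent concrete, here is a self-contained toy (hypothetical Doc class and names, not the ES types) that runs the same algorithm on a two-level nested structure; it assumes, as in the parser, that docs arrive in parse order with each parent encountered before its children:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;

class Doc {
    final String name;
    final Doc parent;
    Doc(String name, Doc parent) { this.name = name; this.parent = parent; }
    Doc getParent() { return parent; }
    public String toString() { return name; }
}

public class ReorderDemo {
    // Same algorithm as reorderParent above: children keep their source order,
    // each parent is emitted after all of its children.
    static List<Doc> reorderParent(List<Doc> docs) {
        List<Doc> newDocs = new ArrayList<>(docs.size());
        LinkedList<Doc> parents = new LinkedList<>();
        for (Doc doc : docs) {
            while (parents.peek() != doc.getParent()) {
                newDocs.add(parents.poll());
            }
            parents.add(0, doc);
        }
        newDocs.addAll(parents);
        return newDocs;
    }

    public static void main(String[] args) {
        Doc root = new Doc("root", null);
        Doc n1 = new Doc("n1", root);
        Doc g1 = new Doc("n1.n2[0]", n1);
        Doc g2 = new Doc("n1.n2[1]", n1);
        // Parse order: parents are encountered before their children.
        // Prints [n1.n2[0], n1.n2[1], n1, root].
        System.out.println(reorderParent(Arrays.asList(root, n1, g1, g2)));
    }
}

Children stay in source-array order and every parent follows its children, which is what the new fetch-phase offset computation below relies on.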
View File

@ -21,6 +21,9 @@ package org.elasticsearch.indices;
import com.carrotsearch.hppc.ObjectHashSet; import com.carrotsearch.hppc.ObjectHashSet;
import com.carrotsearch.hppc.ObjectSet; import com.carrotsearch.hppc.ObjectSet;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
@ -75,6 +78,8 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
public static final Setting<TimeValue> INDICES_CACHE_QUERY_EXPIRE = public static final Setting<TimeValue> INDICES_CACHE_QUERY_EXPIRE =
Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), Property.NodeScope); Setting.positiveTimeSetting("indices.requests.cache.expire", new TimeValue(0), Property.NodeScope);
private static final Logger LOGGER = LogManager.getLogger(IndicesRequestCache.class);
private final ConcurrentMap<CleanupKey, Boolean> registeredClosedListeners = ConcurrentCollections.newConcurrentMap(); private final ConcurrentMap<CleanupKey, Boolean> registeredClosedListeners = ConcurrentCollections.newConcurrentMap();
private final Set<CleanupKey> keysToClean = ConcurrentCollections.newConcurrentSet(); private final Set<CleanupKey> keysToClean = ConcurrentCollections.newConcurrentSet();
private final ByteSizeValue size; private final ByteSizeValue size;
@ -109,13 +114,19 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
notification.getKey().entity.onRemoval(notification); notification.getKey().entity.onRemoval(notification);
} }
// NORELEASE The cacheKeyRenderer has been added in order to debug
// https://github.com/elastic/elasticsearch/issues/32827, it should be
// removed when this issue is solved
BytesReference getOrCompute(CacheEntity cacheEntity, Supplier<BytesReference> loader,
        DirectoryReader reader, BytesReference cacheKey, Supplier<String> cacheKeyRenderer) throws Exception {
final Key key = new Key(cacheEntity, reader.getVersion(), cacheKey); final Key key = new Key(cacheEntity, reader.getVersion(), cacheKey);
Loader cacheLoader = new Loader(cacheEntity, loader); Loader cacheLoader = new Loader(cacheEntity, loader);
BytesReference value = cache.computeIfAbsent(key, cacheLoader); BytesReference value = cache.computeIfAbsent(key, cacheLoader);
if (cacheLoader.isLoaded()) { if (cacheLoader.isLoaded()) {
key.entity.onMiss(); key.entity.onMiss();
if (logger.isTraceEnabled()) {
logger.trace("Cache miss for reader version [{}] and request:\n {}", reader.getVersion(), cacheKeyRenderer.get());
}
// see if its the first time we see this reader, and make sure to register a cleanup key // see if its the first time we see this reader, and make sure to register a cleanup key
CleanupKey cleanupKey = new CleanupKey(cacheEntity, reader.getVersion()); CleanupKey cleanupKey = new CleanupKey(cacheEntity, reader.getVersion());
if (!registeredClosedListeners.containsKey(cleanupKey)) { if (!registeredClosedListeners.containsKey(cleanupKey)) {
@ -126,6 +137,9 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo
} }
} else { } else {
key.entity.onHit(); key.entity.onHit();
if (logger.isTraceEnabled()) {
logger.trace("Cache hit for reader version [{}] and request:\n {}", reader.getVersion(), cacheKeyRenderer.get());
}
} }
return value; return value;
} }

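The cacheKeyRenderer is a Supplier rather than a pre-rendered String so the request source is only serialized when trace logging is actually enabled. A minimal sketch of that pattern (hypothetical names, plain JDK):

import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

public class LazyRenderDemo {
    public static void main(String[] args) {
        AtomicInteger renders = new AtomicInteger();
        // Stands in for rendering request.source(): potentially expensive, so it
        // is wrapped in a Supplier and only invoked behind the trace guard.
        Supplier<String> cacheKeyRenderer = () -> {
            renders.incrementAndGet();
            return "Shard: [products][0]\nSource:\n{\"query\":{\"match_all\":{}}}";
        };
        boolean traceEnabled = false; // logger.isTraceEnabled() in the real code
        if (traceEnabled) {
            System.out.println("Cache miss for request:\n " + cacheKeyRenderer.get());
        }
        System.out.println("renders = " + renders.get()); // 0: nothing was serialized
    }
}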
View File

@ -1191,7 +1191,9 @@ public class IndicesService extends AbstractLifecycleComponent
final DirectoryReader directoryReader = context.searcher().getDirectoryReader(); final DirectoryReader directoryReader = context.searcher().getDirectoryReader();
boolean[] loadedFromCache = new boolean[] { true }; boolean[] loadedFromCache = new boolean[] { true };
BytesReference bytesReference = cacheShardLevelResult(context.indexShard(), directoryReader, request.cacheKey(), () -> {
return "Shard: " + request.shardId() + "\nSource:\n" + request.source();
}, out -> {
queryPhase.execute(context); queryPhase.execute(context);
try { try {
context.queryResult().writeToNoId(out); context.queryResult().writeToNoId(out);
@ -1217,6 +1219,10 @@ public class IndicesService extends AbstractLifecycleComponent
// running a search that times out concurrently will likely timeout again if it's run while we have this `stale` result in the // running a search that times out concurrently will likely timeout again if it's run while we have this `stale` result in the
// cache. One other option is to not cache requests with a timeout at all... // cache. One other option is to not cache requests with a timeout at all...
indicesRequestCache.invalidate(new IndexShardCacheEntity(context.indexShard()), directoryReader, request.cacheKey()); indicesRequestCache.invalidate(new IndexShardCacheEntity(context.indexShard()), directoryReader, request.cacheKey());
if (logger.isTraceEnabled()) {
logger.trace("Query timed out, invalidating cache entry for request on shard [{}]:\n {}", request.shardId(),
request.source());
}
} }
} }
@ -1232,8 +1238,8 @@ public class IndicesService extends AbstractLifecycleComponent
* @param loader loads the data into the cache if needed * @param loader loads the data into the cache if needed
* @return the contents of the cache or the result of calling the loader * @return the contents of the cache or the result of calling the loader
*/ */
private BytesReference cacheShardLevelResult(IndexShard shard, DirectoryReader reader, BytesReference cacheKey,
        Supplier<String> cacheKeyRenderer, Consumer<StreamOutput> loader) throws Exception {
IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard); IndexShardCacheEntity cacheEntity = new IndexShardCacheEntity(shard);
Supplier<BytesReference> supplier = () -> { Supplier<BytesReference> supplier = () -> {
/* BytesStreamOutput allows to pass the expected size but by default uses /* BytesStreamOutput allows to pass the expected size but by default uses
@ -1251,7 +1257,7 @@ public class IndicesService extends AbstractLifecycleComponent
return out.bytes(); return out.bytes();
} }
}; };
return indicesRequestCache.getOrCompute(cacheEntity, supplier, reader, cacheKey, cacheKeyRenderer);
} }
static final class IndexShardCacheEntity extends AbstractIndexShardCacheEntity { static final class IndexShardCacheEntity extends AbstractIndexShardCacheEntity {

View File

@ -18,7 +18,12 @@
*/ */
package org.elasticsearch.search.aggregations.metrics; package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FutureArrays;
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.ScoreMode;
import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BigArrays;
@ -33,30 +38,45 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.search.aggregations.metrics.MinAggregator.getPointReaderOrNull;
class MaxAggregator extends NumericMetricsAggregator.SingleValue { class MaxAggregator extends NumericMetricsAggregator.SingleValue {
final ValuesSource.Numeric valuesSource; final ValuesSource.Numeric valuesSource;
final DocValueFormat formatter; final DocValueFormat formatter;
final String pointField;
final Function<byte[], Number> pointConverter;
DoubleArray maxes; DoubleArray maxes;
MaxAggregator(String name,
              ValuesSourceConfig<ValuesSource.Numeric> config,
              ValuesSource.Numeric valuesSource,
              SearchContext context,
              Aggregator parent, List<PipelineAggregator> pipelineAggregators,
              Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        maxes = context.bigArrays().newDoubleArray(1, false);
        maxes.fill(0, maxes.size(), Double.NEGATIVE_INFINITY);
    }
    this.formatter = config.format();
    this.pointConverter = getPointReaderOrNull(context, parent, config);
    if (pointConverter != null) {
        pointField = config.fieldContext().field();
    } else {
        pointField = null;
    }
}
@Override @Override
@ -68,8 +88,28 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue {
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException { final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
    if (parent != null) {
        return LeafBucketCollector.NO_OP_COLLECTOR;
    } else {
        // we have no parent and the values source is empty so we can skip collecting hits.
        throw new CollectionTerminatedException();
    }
}
if (pointConverter != null) {
Number segMax = findLeafMaxValue(ctx.reader(), pointField, pointConverter);
if (segMax != null) {
/**
 * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull})
 * so the ordinal for the bucket is always 0.
 */
assert maxes.size() == 1;
double max = maxes.get(0);
max = Math.max(max, segMax.doubleValue());
maxes.set(0, max);
// the maximum value has been extracted, we don't need to collect hits on this segment.
throw new CollectionTerminatedException();
}
}
final BigArrays bigArrays = context.bigArrays(); final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx); final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx);
final NumericDoubleValues values = MultiValueMode.MAX.select(allValues); final NumericDoubleValues values = MultiValueMode.MAX.select(allValues);
@ -118,4 +158,48 @@ class MaxAggregator extends NumericMetricsAggregator.SingleValue {
public void doClose() { public void doClose() {
Releasables.close(maxes); Releasables.close(maxes);
} }
/**
* Returns the maximum value indexed in the <code>fieldName</code> field or <code>null</code>
* if the value cannot be inferred from the indexed {@link PointValues}.
*/
static Number findLeafMaxValue(LeafReader reader, String fieldName, Function<byte[], Number> converter) throws IOException {
final PointValues pointValues = reader.getPointValues(fieldName);
if (pointValues == null) {
return null;
}
final Bits liveDocs = reader.getLiveDocs();
if (liveDocs == null) {
return converter.apply(pointValues.getMaxPackedValue());
}
int numBytes = pointValues.getBytesPerDimension();
final byte[] maxValue = pointValues.getMaxPackedValue();
final Number[] result = new Number[1];
pointValues.intersect(new PointValues.IntersectVisitor() {
@Override
public void visit(int docID) {
throw new UnsupportedOperationException();
}
@Override
public void visit(int docID, byte[] packedValue) {
if (liveDocs.get(docID)) {
// we need to collect all values in this leaf (the sort is ascending) where
// the last live doc is guaranteed to contain the max value for the segment.
result[0] = converter.apply(packedValue);
}
}
@Override
public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
if (FutureArrays.equals(maxValue, 0, numBytes, maxPackedValue, 0, numBytes)) {
// we only check leaves that contain the max value for the segment.
return PointValues.Relation.CELL_CROSSES_QUERY;
} else {
return PointValues.Relation.CELL_OUTSIDE_QUERY;
}
}
});
return result[0];
}
} }

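The early-termination path works because the BKD tree stores per-segment minimum and maximum packed values. A standalone sketch of reading a segment max without visiting any document, valid when the segment has no deletes (standard Lucene APIs; ByteBuffersDirectory assumes Lucene 8+, older versions can use RAMDirectory):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.store.ByteBuffersDirectory;

public class SegmentMaxDemo {
    public static void main(String[] args) throws Exception {
        ByteBuffersDirectory dir = new ByteBuffersDirectory();
        try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig())) {
            for (long v : new long[] {3, 42, 7}) {
                Document doc = new Document();
                doc.add(new LongPoint("price", v)); // "price" is an illustrative field name
                writer.addDocument(doc);
            }
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            PointValues points = reader.leaves().get(0).reader().getPointValues("price");
            // With no deleted docs the packed max is the segment max: prints 42
            // without touching a single document, as in findLeafMaxValue above.
            System.out.println(LongPoint.decodeDimension(points.getMaxPackedValue(), 0));
        }
    }
}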
View File

@ -43,13 +43,13 @@ class MaxAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Nu
@Override @Override
protected Aggregator createUnmapped(Aggregator parent, protected Aggregator createUnmapped(Aggregator parent,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException { List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
return new MaxAggregator(name, config, null, context, parent, pipelineAggregators, metaData);
} }
@Override @Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent,
boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) boolean collectsFromSingleBucket, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException { throws IOException {
return new MaxAggregator(name, config, valuesSource, context, parent, pipelineAggregators, metaData);
} }
} }

View File

@ -18,13 +18,23 @@
*/ */
package org.elasticsearch.search.aggregations.metrics; package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.search.CollectionTerminatedException;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.util.Bits;
import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.ScoreMode;
import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.DoubleArray; import org.elasticsearch.common.util.DoubleArray;
import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.NumericDoubleValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.DocValueFormat;
import org.elasticsearch.search.MultiValueMode; import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.Aggregator;
@ -33,29 +43,44 @@ import org.elasticsearch.search.aggregations.LeafBucketCollector;
import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.function.Function;
class MinAggregator extends NumericMetricsAggregator.SingleValue { class MinAggregator extends NumericMetricsAggregator.SingleValue {
final ValuesSource.Numeric valuesSource; final ValuesSource.Numeric valuesSource;
final DocValueFormat format; final DocValueFormat format;
final String pointField;
final Function<byte[], Number> pointConverter;
DoubleArray mins; DoubleArray mins;
MinAggregator(String name,
              ValuesSourceConfig<ValuesSource.Numeric> config,
              ValuesSource.Numeric valuesSource,
              SearchContext context,
              Aggregator parent,
              List<PipelineAggregator> pipelineAggregators,
              Map<String, Object> metaData) throws IOException {
    super(name, context, parent, pipelineAggregators, metaData);
    this.valuesSource = valuesSource;
    if (valuesSource != null) {
        mins = context.bigArrays().newDoubleArray(1, false);
        mins.fill(0, mins.size(), Double.POSITIVE_INFINITY);
    }
    this.format = config.format();
    this.pointConverter = getPointReaderOrNull(context, parent, config);
    if (pointConverter != null) {
        pointField = config.fieldContext().field();
    } else {
        pointField = null;
    }
}
@Override @Override
@ -67,7 +92,26 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue {
public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, public LeafBucketCollector getLeafCollector(LeafReaderContext ctx,
final LeafBucketCollector sub) throws IOException { final LeafBucketCollector sub) throws IOException {
if (valuesSource == null) {
    if (parent != null) {
        return LeafBucketCollector.NO_OP_COLLECTOR;
    } else {
        // we have no parent and the values source is empty so we can skip collecting hits.
        throw new CollectionTerminatedException();
    }
}
if (pointConverter != null) {
    Number segMin = findLeafMinValue(ctx.reader(), pointField, pointConverter);
    if (segMin != null) {
        /**
         * There is no parent aggregator (see {@link MinAggregator#getPointReaderOrNull})
         * so the ordinal for the bucket is always 0.
         */
        double min = mins.get(0);
        min = Math.min(min, segMin.doubleValue());
        mins.set(0, min);
        // the minimum value has been extracted, we don't need to collect hits on this segment.
        throw new CollectionTerminatedException();
    }
}
final BigArrays bigArrays = context.bigArrays(); final BigArrays bigArrays = context.bigArrays();
final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx); final SortedNumericDoubleValues allValues = valuesSource.doubleValues(ctx);
@ -117,4 +161,77 @@ class MinAggregator extends NumericMetricsAggregator.SingleValue {
public void doClose() { public void doClose() {
Releasables.close(mins); Releasables.close(mins);
} }
/**
* Returns a converter for point values if early termination is applicable to
* the context or <code>null</code> otherwise.
*
* @param context The {@link SearchContext} of the aggregation.
* @param parent The parent aggregator.
* @param config The config for the values source metric.
*/
static Function<byte[], Number> getPointReaderOrNull(SearchContext context, Aggregator parent,
ValuesSourceConfig<ValuesSource.Numeric> config) {
if (context.query() != null &&
context.query().getClass() != MatchAllDocsQuery.class) {
return null;
}
if (parent != null) {
return null;
}
if (config.fieldContext() != null && config.script() == null) {
MappedFieldType fieldType = config.fieldContext().fieldType();
if (fieldType == null || fieldType.indexOptions() == IndexOptions.NONE) {
return null;
}
Function<byte[], Number> converter = null;
if (fieldType instanceof NumberFieldMapper.NumberFieldType) {
converter = ((NumberFieldMapper.NumberFieldType) fieldType)::parsePoint;
} else if (fieldType.getClass() == DateFieldMapper.DateFieldType.class) {
converter = (in) -> LongPoint.decodeDimension(in, 0);
}
return converter;
}
return null;
}
/**
* Returns the minimum value indexed in the <code>fieldName</code> field or <code>null</code>
* if the value cannot be inferred from the indexed {@link PointValues}.
*/
static Number findLeafMinValue(LeafReader reader, String fieldName, Function<byte[], Number> converter) throws IOException {
final PointValues pointValues = reader.getPointValues(fieldName);
if (pointValues == null) {
return null;
}
final Bits liveDocs = reader.getLiveDocs();
if (liveDocs == null) {
return converter.apply(pointValues.getMinPackedValue());
}
final Number[] result = new Number[1];
try {
pointValues.intersect(new PointValues.IntersectVisitor() {
@Override
public void visit(int docID) {
throw new UnsupportedOperationException();
}
@Override
public void visit(int docID, byte[] packedValue) {
if (liveDocs.get(docID)) {
result[0] = converter.apply(packedValue);
// this is the first leaf with a live doc so the value is the minimum for this segment.
throw new CollectionTerminatedException();
}
}
@Override
public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
return PointValues.Relation.CELL_CROSSES_QUERY;
}
});
} catch (CollectionTerminatedException e) {}
return result[0];
}
} }

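The intersect over live docs exists because getMinPackedValue can be stale: deletes are tombstones and the BKD metadata is only rewritten on merge. A small sketch of that caveat (standard Lucene APIs, same assumptions as the previous example):

import org.apache.lucene.document.Document;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.store.ByteBuffersDirectory;

public class StalePackedMinDemo {
    public static void main(String[] args) throws Exception {
        ByteBuffersDirectory dir = new ByteBuffersDirectory();
        IndexWriterConfig config = new IndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
        try (IndexWriter writer = new IndexWriter(dir, config)) {
            for (long v : new long[] {3, 42, 7}) {
                Document doc = new Document();
                doc.add(new LongPoint("price", v));
                writer.addDocument(doc);
            }
            writer.commit();
            writer.deleteDocuments(LongPoint.newExactQuery("price", 3)); // delete the current min
        }
        try (DirectoryReader reader = DirectoryReader.open(dir)) {
            PointValues points = reader.leaves().get(0).reader().getPointValues("price");
            // Still prints 3: the deleted doc's value survives in the BKD metadata,
            // which is why findLeafMinValue intersects live docs (true live min is 7).
            System.out.println(LongPoint.decodeDimension(points.getMinPackedValue(), 0));
        }
    }
}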
View File

@ -43,12 +43,12 @@ class MinAggregatorFactory extends ValuesSourceAggregatorFactory<ValuesSource.Nu
@Override @Override
protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) protected Aggregator createUnmapped(Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException { throws IOException {
return new MinAggregator(name, config, null, context, parent, pipelineAggregators, metaData);
} }
@Override @Override
protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket, protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException { List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
return new MinAggregator(name, config, valuesSource, context, parent, pipelineAggregators, metaData);
} }
} }

View File

@ -196,6 +196,14 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
protected ScriptedMetricAggregatorFactory doBuild(SearchContext context, AggregatorFactory<?> parent, protected ScriptedMetricAggregatorFactory doBuild(SearchContext context, AggregatorFactory<?> parent,
Builder subfactoriesBuilder) throws IOException { Builder subfactoriesBuilder) throws IOException {
if (combineScript == null) {
throw new IllegalArgumentException("[combineScript] must not be null: [" + name + "]");
}
if (reduceScript == null) {
throw new IllegalArgumentException("[reduceScript] must not be null: [" + name + "]");
}
QueryShardContext queryShardContext = context.getQueryShardContext(); QueryShardContext queryShardContext = context.getQueryShardContext();
// Extract params from scripts and pass them along to ScriptedMetricAggregatorFactory, since it won't have // Extract params from scripts and pass them along to ScriptedMetricAggregatorFactory, since it won't have
@ -215,16 +223,14 @@ public class ScriptedMetricAggregationBuilder extends AbstractAggregationBuilder
ScriptedMetricAggContexts.MapScript.CONTEXT); ScriptedMetricAggContexts.MapScript.CONTEXT);
Map<String, Object> mapScriptParams = mapScript.getParams(); Map<String, Object> mapScriptParams = mapScript.getParams();
ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript;
Map<String, Object> combineScriptParams;
compiledCombineScript = queryShardContext.getScriptService().compile(combineScript,
    ScriptedMetricAggContexts.CombineScript.CONTEXT);
combineScriptParams = combineScript.getParams();
return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript, return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript,
initScriptParams, compiledCombineScript, combineScriptParams, reduceScript, initScriptParams, compiledCombineScript, combineScriptParams, reduceScript,
params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData); params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData);

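For callers, the practical effect is that a scripted_metric aggregation must now supply combine and reduce scripts up front. A hedged usage sketch (the Painless one-liners and the 'amount' field are illustrative only; the builder import path shown is the 6.x one):

import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.metrics.scripted.ScriptedMetricAggregationBuilder;

public class ScriptedMetricUsage {
    // combine_script and reduce_script are now mandatory; omitting either makes
    // doBuild throw IllegalArgumentException instead of silently shipping raw
    // per-shard map state back to the coordinating node.
    static ScriptedMetricAggregationBuilder totalAmount() {
        return new ScriptedMetricAggregationBuilder("total")
            .initScript(new Script("state.values = []"))
            .mapScript(new Script("state.values.add(doc['amount'].value)"))
            .combineScript(new Script("double s = 0; for (v in state.values) { s += v } return s"))
            .reduceScript(new Script("double s = 0; for (v in states) { s += v } return s"));
    }
}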
View File

@ -45,6 +45,10 @@ public abstract class ValuesSourceAggregatorFactory<VS extends ValuesSource, AF
return config.timezone(); return config.timezone();
} }
public ValuesSourceConfig<VS> getConfig() {
return config;
}
@Override @Override
public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket, public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBucket,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException { List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {

View File

@ -30,6 +30,7 @@ import org.apache.lucene.search.TotalHits.Relation;
import org.apache.lucene.search.Weight; import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BitSet;
import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.document.DocumentField;
@ -38,6 +39,7 @@ import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor; import org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor; import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapper;
@ -344,6 +346,7 @@ public class FetchPhase implements SearchPhase {
ObjectMapper current = nestedObjectMapper; ObjectMapper current = nestedObjectMapper;
String originalName = nestedObjectMapper.name(); String originalName = nestedObjectMapper.name();
SearchHit.NestedIdentity nestedIdentity = null; SearchHit.NestedIdentity nestedIdentity = null;
final IndexSettings indexSettings = context.getQueryShardContext().getIndexSettings();
do { do {
Query parentFilter; Query parentFilter;
nestedParentObjectMapper = current.getParentObjectMapper(mapperService); nestedParentObjectMapper = current.getParentObjectMapper(mapperService);
@ -374,12 +377,32 @@ public class FetchPhase implements SearchPhase {
BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext);
int offset = 0;
if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_6_5_0)) {
    /**
     * Starts from the previous parent and finds the offset of the
     * <code>nestedSubDocId</code> within the nested children. Nested documents
     * are indexed in the same order as in the source array so the offset
     * of the nested child is the number of nested documents with the same parent
     * that appear before it.
     */
    int previousParent = parentBits.prevSetBit(currentParent);
    for (int docId = childIter.advance(previousParent + 1); docId < nestedSubDocId && docId != DocIdSetIterator.NO_MORE_DOCS;
            docId = childIter.nextDoc()) {
        offset++;
    }
    currentParent = nestedSubDocId;
} else {
    /**
     * Nested documents are in reverse order in this version so we start from the current nested document
     * and find the number of documents with the same parent that appear after it.
     */
    int nextParent = parentBits.nextSetBit(currentParent);
    for (int docId = childIter.advance(currentParent + 1); docId < nextParent && docId != DocIdSetIterator.NO_MORE_DOCS;
            docId = childIter.nextDoc()) {
        offset++;
    }
    currentParent = nextParent;
}
current = nestedObjectMapper = nestedParentObjectMapper; current = nestedObjectMapper = nestedParentObjectMapper;
int currentPrefix = current == null ? 0 : current.name().length() + 1; int currentPrefix = current == null ? 0 : current.name().length() + 1;
nestedIdentity = new SearchHit.NestedIdentity(originalName.substring(currentPrefix), offset, nestedIdentity); nestedIdentity = new SearchHit.NestedIdentity(originalName.substring(currentPrefix), offset, nestedIdentity);

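A toy version of the 6.5+ offset computation above, with plain arrays standing in for the parent BitSet and the child DocIdSetIterator, and invented doc ids: the offset of a nested hit is simply the number of siblings between its parent's predecessor and itself.

public class NestedOffsetDemo {
    public static void main(String[] args) {
        // Doc ids 0..2 are children of the parent at 3; 4..5 are children of the parent at 6.
        boolean[] isParent = {false, false, false, true, false, false, true};
        int nestedSubDocId = 5;  // the hit: second child of the parent at doc id 6
        int previousParent = 3;  // parentBits.prevSetBit(currentParent) in the real code
        int offset = 0;
        for (int docId = previousParent + 1; docId < nestedSubDocId; docId++) {
            if (isParent[docId] == false) {
                offset++; // counts doc id 4
            }
        }
        System.out.println(offset); // 1: the hit is the second element of the source array
    }
}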
View File

@ -389,28 +389,28 @@ public class CopyToMapperTests extends ESSingleNodeTestCase {
assertEquals(6, doc.docs().size());

Document nested = doc.docs().get(0);
assertFieldValue(nested, "n1.n2.target", 3L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");

nested = doc.docs().get(1);
assertFieldValue(nested, "n1.n2.target", 5L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");

nested = doc.docs().get(3);
assertFieldValue(nested, "n1.n2.target", 7L);
assertFieldValue(nested, "n1.target");
assertFieldValue(nested, "target");

Document parent = doc.docs().get(2);
assertFieldValue(parent, "target");
assertFieldValue(parent, "n1.target", 3L, 5L);
assertFieldValue(parent, "n1.n2.target");

parent = doc.docs().get(4);
assertFieldValue(parent, "target");
assertFieldValue(parent, "n1.target", 7L);
assertFieldValue(parent, "n1.n2.target");

Document root = doc.docs().get(5);

View File

@ -21,6 +21,8 @@ package org.elasticsearch.index.mapper;
import java.util.HashSet; import java.util.HashSet;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
@ -33,6 +35,7 @@ import org.elasticsearch.index.mapper.ObjectMapper.Dynamic;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException; import java.io.IOException;
import java.io.UncheckedIOException; import java.io.UncheckedIOException;
@ -120,11 +123,11 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
assertThat(doc.docs().size(), equalTo(3));
assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
assertThat(doc.docs().get(1).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
assertThat(doc.docs().get(2).get("field"), equalTo("value"));
}
@ -160,20 +163,20 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON)); XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7));
assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue());
assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue());
assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(2).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(2).get("field"), nullValue());
assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue());
assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue());
assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(5).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value"));
@ -212,21 +215,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON)); XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7));
assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue());
assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue());
assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(2).get("field"), nullValue());
assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue());
assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue());
assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value"));
assertThat(doc.docs().get(6).get("nested1.field1"), nullValue());
@ -264,21 +267,21 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON)); XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7));
assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue());
assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue());
assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(2).get("field"), nullValue());
assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue());
assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue());
assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value"));
assertThat(doc.docs().get(6).getFields("nested1.field1").length, equalTo(2));
@ -316,20 +319,20 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON)); XContentType.JSON));
assertThat(doc.docs().size(), equalTo(7)); assertThat(doc.docs().size(), equalTo(7));
assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("6")); assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
assertThat(doc.docs().get(0).get("nested1.field1"), nullValue()); assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(0).get("field"), nullValue()); assertThat(doc.docs().get(0).get("field"), nullValue());
assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("5")); assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field1"), nullValue()); assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
assertThat(doc.docs().get(1).get("field"), nullValue()); assertThat(doc.docs().get(1).get("field"), nullValue());
assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("4")); assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(2).get("nested1.nested2.field2"), nullValue()); assertThat(doc.docs().get(2).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(2).get("field"), nullValue()); assertThat(doc.docs().get(2).get("field"), nullValue());
assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("3")); assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
assertThat(doc.docs().get(3).get("field"), nullValue()); assertThat(doc.docs().get(3).get("field"), nullValue());
assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("2")); assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
assertThat(doc.docs().get(4).get("field"), nullValue()); assertThat(doc.docs().get(4).get("field"), nullValue());
assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("1")); assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(5).get("nested1.nested2.field2"), nullValue()); assertThat(doc.docs().get(5).get("nested1.nested2.field2"), nullValue());
assertThat(doc.docs().get(5).get("field"), nullValue()); assertThat(doc.docs().get(5).get("field"), nullValue());
assertThat(doc.docs().get(6).get("field"), equalTo("value")); assertThat(doc.docs().get(6).get("field"), equalTo("value"));
@ -424,9 +427,9 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
XContentType.JSON)); XContentType.JSON));
assertThat(doc.docs().size(), equalTo(3)); assertThat(doc.docs().size(), equalTo(3));
assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("4")); assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(0).get("field"), nullValue()); assertThat(doc.docs().get(0).get("field"), nullValue());
assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("1")); assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("4"));
assertThat(doc.docs().get(1).get("field"), nullValue()); assertThat(doc.docs().get(1).get("field"), nullValue());
assertThat(doc.docs().get(2).get("field"), equalTo("value")); assertThat(doc.docs().get(2).get("field"), equalTo("value"));
} }
@ -634,4 +637,63 @@ public class NestedObjectMapperTests extends ESSingleNodeTestCase {
); );
} }
@Override
protected boolean forbidPrivateIndexSettings() {
/**
* This is needed to force the index version with {@link IndexMetaData#SETTING_INDEX_VERSION_CREATED}.
*/
return false;
}
public void testReorderParentBWC() throws IOException {
String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("nested1").field("type", "nested").endObject()
.endObject().endObject().endObject());
Version bwcVersion = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0,
Version.V_6_5_0);
for (Version version : new Version[] {Version.V_6_5_0, bwcVersion}) {
DocumentMapper docMapper = createIndex("test-" + version,
Settings.builder().put(IndexMetaData.SETTING_INDEX_VERSION_CREATED.getKey(), version).build())
.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
assertThat(docMapper.hasNestedObjects(), equalTo(true));
ObjectMapper nested1Mapper = docMapper.objectMappers().get("nested1");
assertThat(nested1Mapper.nested().isNested(), equalTo(true));
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.bytes(XContentFactory.jsonBuilder()
.startObject()
.field("field", "value")
.startArray("nested1")
.startObject()
.field("field1", "1")
.field("field2", "2")
.endObject()
.startObject()
.field("field1", "3")
.field("field2", "4")
.endObject()
.endArray()
.endObject()),
XContentType.JSON));
assertThat(doc.docs().size(), equalTo(3));
if (version.onOrAfter(Version.V_6_5_0)) {
assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
assertThat(doc.docs().get(2).get("field"), equalTo("value"));
} else {
assertThat(doc.docs().get(0).get(TypeFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePathAsString()));
assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("3"));
assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("4"));
assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("1"));
assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("2"));
assertThat(doc.docs().get(2).get("field"), equalTo("value"));
}
}
}
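The branches above pin down the ordering change: for indices created on or after 6.5.0 the nested child Lucene documents keep their insertion order (field1 = "1" then "3"), while indices created earlier store them reversed. A minimal sketch of that rule, assuming a simplified parser (class and method names here are illustrative, not the Elasticsearch internals):

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

final class NestedChildOrder {
    // 6.5.0+ indices emit children in insertion order; older indices
    // produced them reversed, which is what the else-branch asserts.
    static <T> List<T> order(List<T> childrenInInsertionOrder, boolean onOrAfter650) {
        List<T> docs = new ArrayList<>(childrenInInsertionOrder);
        if (onOrAfter650 == false) {
            Collections.reverse(docs);
        }
        return docs;
    }
}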
} }

View File

@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper;
import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.HalfFloatPoint; import org.apache.lucene.document.HalfFloatPoint;
import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.IntPoint;
@ -53,6 +54,7 @@ import java.util.List;
import java.util.function.Supplier; import java.util.function.Supplier;
import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class NumberFieldTypeTests extends FieldTypeTestCase { public class NumberFieldTypeTests extends FieldTypeTestCase {
@ -530,4 +532,49 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
assertEquals(Double.valueOf(1.2), assertEquals(Double.valueOf(1.2),
new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE).valueForDisplay(1.2)); new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.DOUBLE).valueForDisplay(1.2));
} }
public void testParsePoint() {
{
byte[] bytes = new byte[Integer.BYTES];
byte value = randomByte();
IntPoint.encodeDimension(value, bytes, 0);
assertThat(NumberType.BYTE.parsePoint(bytes), equalTo(value));
}
{
byte[] bytes = new byte[Integer.BYTES];
short value = randomShort();
IntPoint.encodeDimension(value, bytes, 0);
assertThat(NumberType.SHORT.parsePoint(bytes), equalTo(value));
}
{
byte[] bytes = new byte[Integer.BYTES];
int value = randomInt();
IntPoint.encodeDimension(value, bytes, 0);
assertThat(NumberType.INTEGER.parsePoint(bytes), equalTo(value));
}
{
byte[] bytes = new byte[Long.BYTES];
long value = randomLong();
LongPoint.encodeDimension(value, bytes, 0);
assertThat(NumberType.LONG.parsePoint(bytes), equalTo(value));
}
{
byte[] bytes = new byte[Float.BYTES];
float value = randomFloat();
FloatPoint.encodeDimension(value, bytes, 0);
assertThat(NumberType.FLOAT.parsePoint(bytes), equalTo(value));
}
{
byte[] bytes = new byte[Double.BYTES];
double value = randomDouble();
DoublePoint.encodeDimension(value, bytes, 0);
assertThat(NumberType.DOUBLE.parsePoint(bytes), equalTo(value));
}
{
byte[] bytes = new byte[Float.BYTES];
float value = 3f;
HalfFloatPoint.encodeDimension(value, bytes, 0);
assertThat(NumberType.HALF_FLOAT.parsePoint(bytes), equalTo(value));
}
}
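Each block of testParsePoint checks that parsePoint inverts the matching Lucene point encoding: byte and short values are widened and stored with the int encoding (hence the Integer.BYTES buffers), while half_float uses HalfFloatPoint's own compact encoding. For reference, a self-contained round trip through the public Lucene point APIs (plain Java, no Elasticsearch classes involved):

import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;

public class PointRoundTripExample {
    public static void main(String[] args) {
        byte[] intBytes = new byte[Integer.BYTES];
        IntPoint.encodeDimension(42, intBytes, 0);           // sortable fixed-width encoding
        assert IntPoint.decodeDimension(intBytes, 0) == 42;

        byte[] longBytes = new byte[Long.BYTES];
        LongPoint.encodeDimension(42L, longBytes, 0);
        assert LongPoint.decodeDimension(longBytes, 0) == 42L;

        byte[] doubleBytes = new byte[Double.BYTES];
        DoublePoint.encodeDimension(4.2d, doubleBytes, 0);
        assert DoublePoint.decodeDimension(doubleBytes, 0) == 4.2d;
    }
}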
} }

View File

@ -33,6 +33,7 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram;
import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket;
import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.joda.time.DateTimeZone; import org.joda.time.DateTimeZone;
import java.time.ZoneOffset; import java.time.ZoneOffset;
@ -49,6 +50,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSear
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThan;
@TestLogging(value = "org.elasticsearch.indices.IndicesRequestCache:TRACE")
public class IndicesRequestCacheIT extends ESIntegTestCase { public class IndicesRequestCacheIT extends ESIntegTestCase {
// One of the primary purposes of the query cache is to cache aggs results // One of the primary purposes of the query cache is to cache aggs results
@ -417,8 +419,8 @@ public class IndicesRequestCacheIT extends ESIntegTestCase {
.getRequestCache(); .getRequestCache();
// Check the hit count and miss count together so if they are not // Check the hit count and miss count together so if they are not
// correct we can see both values // correct we can see both values
assertEquals(Arrays.asList(expectedHits, expectedMisses), assertEquals(Arrays.asList(expectedHits, expectedMisses, 0L),
Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount())); Arrays.asList(requestCacheStats.getHitCount(), requestCacheStats.getMissCount(), requestCacheStats.getEvictions()));
} }
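The widened assertion folds the eviction counter into the same list comparison, so one failure message reports hits, misses, and evictions together. The pattern in isolation, as a plain JUnit sketch (names illustrative):

import static org.junit.Assert.assertEquals;

import java.util.Arrays;

class CacheCounterAssertions {
    static void assertCounters(long expectedHits, long expectedMisses,
                               long hits, long misses, long evictions) {
        // Comparing lists rather than individual counters means a mismatch
        // prints all three values at once instead of just the first failure.
        assertEquals(Arrays.asList(expectedHits, expectedMisses, 0L),
                     Arrays.asList(hits, misses, evictions));
    }
}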
} }

View File

@ -31,7 +31,6 @@ import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
@ -39,6 +38,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.index.cache.request.ShardRequestCache; import org.elasticsearch.index.cache.request.ShardRequestCache;
import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
@ -68,7 +68,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache // initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard); TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0); Loader loader = new Loader(reader, 0);
BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString()); assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(1, requestCacheStats.stats().getMissCount());
@ -79,7 +79,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache hit // cache hit
entity = new TestEntity(requestCacheStats, indexShard); entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0); loader = new Loader(reader, 0);
value = cache.getOrCompute(entity, loader, reader, termBytes); value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString()); assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(1, requestCacheStats.stats().getMissCount());
@ -126,7 +126,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache // initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard); TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0); Loader loader = new Loader(reader, 0);
BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString()); assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(1, requestCacheStats.stats().getMissCount());
@ -140,7 +140,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache the second // cache the second
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(secondReader, 0); loader = new Loader(secondReader, 0);
value = cache.getOrCompute(entity, loader, secondReader, termBytes); value = cache.getOrCompute(entity, loader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value.streamInput().readString()); assertEquals("bar", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(2, requestCacheStats.stats().getMissCount());
@ -152,7 +152,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
secondEntity = new TestEntity(requestCacheStats, indexShard); secondEntity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(secondReader, 0); loader = new Loader(secondReader, 0);
value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes); value = cache.getOrCompute(secondEntity, loader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value.streamInput().readString()); assertEquals("bar", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(2, requestCacheStats.stats().getMissCount());
@ -162,7 +162,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
entity = new TestEntity(requestCacheStats, indexShard); entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0); loader = new Loader(reader, 0);
value = cache.getOrCompute(entity, loader, reader, termBytes); value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString()); assertEquals("foo", value.streamInput().readString());
assertEquals(2, requestCacheStats.stats().getHitCount()); assertEquals(2, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(2, requestCacheStats.stats().getMissCount());
@ -222,9 +222,9 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard); TestEntity secondEntity = new TestEntity(requestCacheStats, indexShard);
Loader secondLoader = new Loader(secondReader, 0); Loader secondLoader = new Loader(secondReader, 0);
BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value1.streamInput().readString()); assertEquals("foo", value1.streamInput().readString());
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value2.streamInput().readString()); assertEquals("bar", value2.streamInput().readString());
size = requestCacheStats.stats().getMemorySize(); size = requestCacheStats.stats().getMemorySize();
IOUtils.close(reader, secondReader, writer, dir, cache); IOUtils.close(reader, secondReader, writer, dir, cache);
@ -257,12 +257,12 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard); TestEntity thirddEntity = new TestEntity(requestCacheStats, indexShard);
Loader thirdLoader = new Loader(thirdReader, 0); Loader thirdLoader = new Loader(thirdReader, 0);
BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value1.streamInput().readString()); assertEquals("foo", value1.streamInput().readString());
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value2.streamInput().readString()); assertEquals("bar", value2.streamInput().readString());
logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes, () -> termQuery.toString());
assertEquals("baz", value3.streamInput().readString()); assertEquals("baz", value3.streamInput().readString());
assertEquals(2, cache.count()); assertEquals(2, cache.count());
assertEquals(1, requestCacheStats.stats().getEvictions()); assertEquals(1, requestCacheStats.stats().getEvictions());
@ -298,12 +298,12 @@ public class IndicesRequestCacheTests extends ESTestCase {
TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity); TestEntity thirddEntity = new TestEntity(requestCacheStats, differentIdentity);
Loader thirdLoader = new Loader(thirdReader, 0); Loader thirdLoader = new Loader(thirdReader, 0);
BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes); BytesReference value1 = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value1.streamInput().readString()); assertEquals("foo", value1.streamInput().readString());
BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes); BytesReference value2 = cache.getOrCompute(secondEntity, secondLoader, secondReader, termBytes, () -> termQuery.toString());
assertEquals("bar", value2.streamInput().readString()); assertEquals("bar", value2.streamInput().readString());
logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize()); logger.info("Memory size: {}", requestCacheStats.stats().getMemorySize());
BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); BytesReference value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes, () -> termQuery.toString());
assertEquals("baz", value3.streamInput().readString()); assertEquals("baz", value3.streamInput().readString());
assertEquals(3, cache.count()); assertEquals(3, cache.count());
final long hitCount = requestCacheStats.stats().getHitCount(); final long hitCount = requestCacheStats.stats().getHitCount();
@ -312,7 +312,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
cache.cleanCache(); cache.cleanCache();
assertEquals(1, cache.count()); assertEquals(1, cache.count());
// third has not been validated since it's a different identity // third has not been validated since it's a different identity
value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes); value3 = cache.getOrCompute(thirddEntity, thirdLoader, thirdReader, termBytes, () -> termQuery.toString());
assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount()); assertEquals(hitCount + 1, requestCacheStats.stats().getHitCount());
assertEquals("baz", value3.streamInput().readString()); assertEquals("baz", value3.streamInput().readString());
@ -371,7 +371,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// initial cache // initial cache
TestEntity entity = new TestEntity(requestCacheStats, indexShard); TestEntity entity = new TestEntity(requestCacheStats, indexShard);
Loader loader = new Loader(reader, 0); Loader loader = new Loader(reader, 0);
BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes); BytesReference value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString()); assertEquals("foo", value.streamInput().readString());
assertEquals(0, requestCacheStats.stats().getHitCount()); assertEquals(0, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(1, requestCacheStats.stats().getMissCount());
@ -382,7 +382,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
// cache hit // cache hit
entity = new TestEntity(requestCacheStats, indexShard); entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0); loader = new Loader(reader, 0);
value = cache.getOrCompute(entity, loader, reader, termBytes); value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString()); assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(1, requestCacheStats.stats().getMissCount()); assertEquals(1, requestCacheStats.stats().getMissCount());
@ -396,7 +396,7 @@ public class IndicesRequestCacheTests extends ESTestCase {
entity = new TestEntity(requestCacheStats, indexShard); entity = new TestEntity(requestCacheStats, indexShard);
loader = new Loader(reader, 0); loader = new Loader(reader, 0);
cache.invalidate(entity, reader, termBytes); cache.invalidate(entity, reader, termBytes);
value = cache.getOrCompute(entity, loader, reader, termBytes); value = cache.getOrCompute(entity, loader, reader, termBytes, () -> termQuery.toString());
assertEquals("foo", value.streamInput().readString()); assertEquals("foo", value.streamInput().readString());
assertEquals(1, requestCacheStats.stats().getHitCount()); assertEquals(1, requestCacheStats.stats().getHitCount());
assertEquals(2, requestCacheStats.stats().getMissCount()); assertEquals(2, requestCacheStats.stats().getMissCount());
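Every getOrCompute call in this file now threads through a Supplier<String> describing the cached request (here the query's toString). Passing a supplier rather than an eager string keeps the description off the hot path: it is only materialized when something, such as the TRACE logging enabled in IndicesRequestCacheIT, actually consumes it. A sketch of the idea against a simplified cache (illustrative, not the IndicesRequestCache implementation):

import java.util.HashMap;
import java.util.Map;
import java.util.function.Supplier;

final class DescribedCache<K, V> {
    private final Map<K, V> cache = new HashMap<>();
    private final boolean trace;

    DescribedCache(boolean trace) {
        this.trace = trace;
    }

    V getOrCompute(K key, Supplier<V> loader, Supplier<String> description) {
        V value = cache.get(key);
        if (value == null) {
            value = loader.get();
            cache.put(key, value);
            if (trace) {
                // description.get() runs only on a miss with tracing enabled
                System.out.println("cached entry for " + description.get());
            }
        }
        return value;
    }
}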

View File

@ -19,27 +19,49 @@
package org.elasticsearch.search.aggregations.metrics; package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.FutureArrays;
import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.CheckedConsumer;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.AggregatorTestCase;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.function.Consumer; import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Supplier;
import static java.util.Collections.singleton; import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.equalTo;
public class MaxAggregatorTests extends AggregatorTestCase { public class MaxAggregatorTests extends AggregatorTestCase {
public void testNoDocs() throws IOException { public void testNoDocs() throws IOException {
@ -77,7 +99,6 @@ public class MaxAggregatorTests extends AggregatorTestCase {
}); });
} }
public void testQueryFiltering() throws IOException { public void testQueryFiltering() throws IOException {
testCase(IntPoint.newRangeQuery("number", 0, 5), iw -> { testCase(IntPoint.newRangeQuery("number", 0, 5), iw -> {
iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7))); iw.addDocument(Arrays.asList(new IntPoint("number", 7), new SortedNumericDocValuesField("number", 7)));
@ -96,8 +117,9 @@ public class MaxAggregatorTests extends AggregatorTestCase {
}); });
} }
private void testCase(Query query, CheckedConsumer<RandomIndexWriter, IOException> buildIndex, Consumer<InternalMax> verify) private void testCase(Query query,
throws IOException { CheckedConsumer<RandomIndexWriter, IOException> buildIndex,
Consumer<InternalMax> verify) throws IOException {
Directory directory = newDirectory(); Directory directory = newDirectory();
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
buildIndex.accept(indexWriter); buildIndex.accept(indexWriter);
@ -107,10 +129,10 @@ public class MaxAggregatorTests extends AggregatorTestCase {
IndexSearcher indexSearcher = newSearcher(indexReader, true, true); IndexSearcher indexSearcher = newSearcher(indexReader, true, true);
MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number"); MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.INTEGER);
fieldType.setName("number"); fieldType.setName("number");
MaxAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); MaxAggregator aggregator = createAggregator(query, aggregationBuilder, indexSearcher, createIndexSettings(), fieldType);
aggregator.preCollection(); aggregator.preCollection();
indexSearcher.search(query, aggregator); indexSearcher.search(query, aggregator);
aggregator.postCollection(); aggregator.postCollection();
@ -119,4 +141,110 @@ public class MaxAggregatorTests extends AggregatorTestCase {
indexReader.close(); indexReader.close();
directory.close(); directory.close();
} }
public void testMaxShortcutRandom() throws Exception {
testMaxShortcutCase(
() -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE),
(n) -> new LongPoint("number", n.longValue()),
(v) -> LongPoint.decodeDimension(v, 0));
testMaxShortcutCase(
() -> randomInt(),
(n) -> new IntPoint("number", n.intValue()),
(v) -> IntPoint.decodeDimension(v, 0));
testMaxShortcutCase(
() -> randomFloat(),
(n) -> new FloatPoint("number", n.floatValue()),
(v) -> FloatPoint.decodeDimension(v, 0));
testMaxShortcutCase(
() -> randomDouble(),
(n) -> new DoublePoint("number", n.doubleValue()),
(v) -> DoublePoint.decodeDimension(v, 0));
}
private void testMaxShortcutCase(Supplier<Number> randomNumber,
Function<Number, Field> pointFieldFunc,
Function<byte[], Number> pointConvertFunc) throws IOException {
Directory directory = newDirectory();
IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter indexWriter = new IndexWriter(directory, config);
List<Document> documents = new ArrayList<>();
List<Tuple<Integer, Number>> values = new ArrayList<>();
int numValues = atLeast(50);
int docID = 0;
for (int i = 0; i < numValues; i++) {
int numDup = randomIntBetween(1, 3);
for (int j = 0; j < numDup; j++) {
Document document = new Document();
Number nextValue = randomNumber.get();
values.add(new Tuple<>(docID, nextValue));
document.add(new StringField("id", Integer.toString(docID), Field.Store.NO));
document.add(pointFieldFunc.apply(nextValue));
documents.add(document);
docID++;
}
}
// insert some documents without a value for the metric field.
for (int i = 0; i < 3; i++) {
Document document = new Document();
documents.add(document);
}
indexWriter.addDocuments(documents);
Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue()));
try (IndexReader reader = DirectoryReader.open(indexWriter)) {
LeafReaderContext ctx = reader.leaves().get(0);
Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc);
assertThat(res, equalTo(values.get(values.size()-1).v2()));
}
for (int i = values.size()-1; i > 0; i--) {
indexWriter.deleteDocuments(new Term("id", values.get(i).v1().toString()));
try (IndexReader reader = DirectoryReader.open(indexWriter)) {
LeafReaderContext ctx = reader.leaves().get(0);
Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc);
if (res != null) {
assertThat(res, equalTo(values.get(i - 1).v2()));
} else {
assertAllDeleted(ctx.reader().getLiveDocs(), ctx.reader().getPointValues("number"));
}
}
}
indexWriter.deleteDocuments(new Term("id", values.get(0).v1().toString()));
try (IndexReader reader = DirectoryReader.open(indexWriter)) {
LeafReaderContext ctx = reader.leaves().get(0);
Number res = MaxAggregator.findLeafMaxValue(ctx.reader(), "number", pointConvertFunc);
assertThat(res, equalTo(null));
}
indexWriter.close();
directory.close();
}
// checks that documents inside the max leaves are all deleted
private void assertAllDeleted(Bits liveDocs, PointValues values) throws IOException {
final byte[] maxValue = values.getMaxPackedValue();
int numBytes = values.getBytesPerDimension();
final boolean[] seen = new boolean[1];
values.intersect(new PointValues.IntersectVisitor() {
@Override
public void visit(int docID) {
throw new AssertionError();
}
@Override
public void visit(int docID, byte[] packedValue) {
assertFalse(liveDocs.get(docID));
seen[0] = true;
}
@Override
public PointValues.Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
if (FutureArrays.equals(maxPackedValue, 0, numBytes, maxValue, 0, numBytes)) {
return PointValues.Relation.CELL_CROSSES_QUERY;
}
return PointValues.Relation.CELL_OUTSIDE_QUERY;
}
});
assertTrue(seen[0]);
}
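testMaxShortcutRandom drives findLeafMaxValue through progressively more deletions: the returned value must track the largest live document, and once every document holding the current maximum is deleted the method may return null, in which case assertAllDeleted verifies via an intersect that only deleted documents remain in the max-value cells. The core idea of such a shortcut, sketched for a single-valued long field (an assumption for illustration; the production method's exact deletion handling is only implied by this test):

import java.io.IOException;

import org.apache.lucene.document.LongPoint;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.PointValues;

final class LeafMaxSketch {
    static Long maxOrNull(LeafReader reader, String field) throws IOException {
        PointValues points = reader.getPointValues(field);
        if (points == null) {
            return null; // the segment has no point index for this field
        }
        if (reader.hasDeletions()) {
            // BKD metadata may still reflect deleted documents; a real
            // implementation must confirm liveness, e.g. with an intersect.
            return null;
        }
        return LongPoint.decodeDimension(points.getMaxPackedValue(), 0);
    }
}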
} }

View File

@ -40,6 +40,7 @@ import java.util.Map;
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
@ -392,4 +393,22 @@ public class MaxIT extends AbstractNumericTestCase {
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L)); .getMissCount(), equalTo(1L));
} }
public void testEarlyTermination() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx")
.setTrackTotalHits(false)
.setQuery(matchAllQuery())
.addAggregation(max("max").field("values"))
.addAggregation(count("count").field("values"))
.execute().actionGet();
Max max = searchResponse.getAggregations().get("max");
assertThat(max, notNullValue());
assertThat(max.getName(), equalTo("max"));
assertThat(max.getValue(), equalTo(12.0));
ValueCount count = searchResponse.getAggregations().get("count");
assertThat(count.getName(), equalTo("count"));
assertThat(count.getValue(), equalTo(20L));
}
} }

View File

@ -16,20 +16,59 @@
* specific language governing permissions and limitations * specific language governing permissions and limitations
* under the License. * under the License.
*/ */
package org.elasticsearch.search.aggregations.metrics; package org.elasticsearch.search.aggregations.metrics;
import org.apache.lucene.document.Document; import org.apache.lucene.document.Document;
import org.apache.lucene.document.DoublePoint;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FloatPoint;
import org.apache.lucene.document.IntPoint;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField; import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.RandomIndexWriter; import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.DocValuesFieldExistsQuery;
import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.store.Directory; import org.apache.lucene.store.Directory;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorTestCase; import org.elasticsearch.search.aggregations.AggregatorTestCase;
import org.elasticsearch.search.aggregations.support.FieldContext;
import org.elasticsearch.search.aggregations.support.ValuesSource;
import org.elasticsearch.search.aggregations.support.ValuesSourceConfig;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.function.DoubleConsumer;
import java.util.function.Function;
import java.util.function.Supplier;
import static org.hamcrest.Matchers.equalTo;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class MinAggregatorTests extends AggregatorTestCase { public class MinAggregatorTests extends AggregatorTestCase {
@ -38,21 +77,27 @@ public class MinAggregatorTests extends AggregatorTestCase {
RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
Document document = new Document(); Document document = new Document();
document.add(new NumericDocValuesField("number", 9)); document.add(new NumericDocValuesField("number", 9));
document.add(new LongPoint("number", 9));
indexWriter.addDocument(document); indexWriter.addDocument(document);
document = new Document(); document = new Document();
document.add(new NumericDocValuesField("number", 7)); document.add(new NumericDocValuesField("number", 7));
document.add(new LongPoint("number", 7));
indexWriter.addDocument(document); indexWriter.addDocument(document);
document = new Document(); document = new Document();
document.add(new NumericDocValuesField("number", 5)); document.add(new NumericDocValuesField("number", 5));
document.add(new LongPoint("number", 5));
indexWriter.addDocument(document); indexWriter.addDocument(document);
document = new Document(); document = new Document();
document.add(new NumericDocValuesField("number", 3)); document.add(new NumericDocValuesField("number", 3));
document.add(new LongPoint("number", 3));
indexWriter.addDocument(document); indexWriter.addDocument(document);
document = new Document(); document = new Document();
document.add(new NumericDocValuesField("number", 1)); document.add(new NumericDocValuesField("number", 1));
document.add(new LongPoint("number", 1));
indexWriter.addDocument(document); indexWriter.addDocument(document);
document = new Document(); document = new Document();
document.add(new NumericDocValuesField("number", -1)); document.add(new NumericDocValuesField("number", -1));
document.add(new LongPoint("number", -1));
indexWriter.addDocument(document); indexWriter.addDocument(document);
indexWriter.close(); indexWriter.close();
@ -63,6 +108,8 @@ public class MinAggregatorTests extends AggregatorTestCase {
MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
fieldType.setName("number"); fieldType.setName("number");
testMinCase(indexSearcher, aggregationBuilder, fieldType, min -> assertEquals(-1.0d, min, 0));
MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType); MinAggregator aggregator = createAggregator(aggregationBuilder, indexSearcher, fieldType);
aggregator.preCollection(); aggregator.preCollection();
indexSearcher.search(new MatchAllDocsQuery(), aggregator); indexSearcher.search(new MatchAllDocsQuery(), aggregator);
@ -80,14 +127,20 @@ public class MinAggregatorTests extends AggregatorTestCase {
Document document = new Document(); Document document = new Document();
document.add(new SortedNumericDocValuesField("number", 9)); document.add(new SortedNumericDocValuesField("number", 9));
document.add(new SortedNumericDocValuesField("number", 7)); document.add(new SortedNumericDocValuesField("number", 7));
document.add(new LongPoint("number", 9));
document.add(new LongPoint("number", 7));
indexWriter.addDocument(document); indexWriter.addDocument(document);
document = new Document(); document = new Document();
document.add(new SortedNumericDocValuesField("number", 5)); document.add(new SortedNumericDocValuesField("number", 5));
document.add(new SortedNumericDocValuesField("number", 3)); document.add(new SortedNumericDocValuesField("number", 3));
document.add(new LongPoint("number", 5));
document.add(new LongPoint("number", 3));
indexWriter.addDocument(document); indexWriter.addDocument(document);
document = new Document(); document = new Document();
document.add(new SortedNumericDocValuesField("number", 1)); document.add(new SortedNumericDocValuesField("number", 1));
document.add(new SortedNumericDocValuesField("number", -1)); document.add(new SortedNumericDocValuesField("number", -1));
document.add(new LongPoint("number", 1));
document.add(new LongPoint("number", -1));
indexWriter.addDocument(document); indexWriter.addDocument(document);
indexWriter.close(); indexWriter.close();
@ -164,4 +217,207 @@ public class MinAggregatorTests extends AggregatorTestCase {
directory.close(); directory.close();
} }
public void testShortcutIsApplicable() {
for (NumberFieldMapper.NumberType type : NumberFieldMapper.NumberType.values()) {
assertNotNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(new MatchAllDocsQuery()),
null,
mockNumericValuesSourceConfig("number", type, true)
)
);
assertNotNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(null),
null,
mockNumericValuesSourceConfig("number", type, true)
)
);
assertNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(null),
mockAggregator(),
mockNumericValuesSourceConfig("number", type, true)
)
);
assertNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(new TermQuery(new Term("foo", "bar"))),
null,
mockNumericValuesSourceConfig("number", type, true)
)
);
assertNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(null),
null,
mockNumericValuesSourceConfig("number", type, false)
)
);
}
assertNotNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(new MatchAllDocsQuery()),
null,
mockDateValuesSourceConfig("number", true)
)
);
assertNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(new MatchAllDocsQuery()),
mockAggregator(),
mockDateValuesSourceConfig("number", true)
)
);
assertNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(new TermQuery(new Term("foo", "bar"))),
null,
mockDateValuesSourceConfig("number", true)
)
);
assertNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(null),
mockAggregator(),
mockDateValuesSourceConfig("number", true)
)
);
assertNull(
MinAggregator.getPointReaderOrNull(
mockSearchContext(null),
null,
mockDateValuesSourceConfig("number", false)
)
);
}
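The assertions above reduce to three conditions for the point-based shortcut: the aggregation is top-level (no parent aggregator), the query is absent or match-all, and the field is indexed. Restated as a predicate (an illustrative condensation, not the production getPointReaderOrNull):

import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;

final class ShortcutConditions {
    static boolean applicable(Query topLevelQuery, Object parentAggregator, boolean fieldIndexed) {
        boolean queryMatchesAll = topLevelQuery == null || topLevelQuery instanceof MatchAllDocsQuery;
        return queryMatchesAll && parentAggregator == null && fieldIndexed;
    }
}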
public void testMinShortcutRandom() throws Exception {
testMinShortcutCase(
() -> randomLongBetween(Integer.MIN_VALUE, Integer.MAX_VALUE),
(n) -> new LongPoint("number", n.longValue()),
(v) -> LongPoint.decodeDimension(v, 0));
testMinShortcutCase(
() -> randomInt(),
(n) -> new IntPoint("number", n.intValue()),
(v) -> IntPoint.decodeDimension(v, 0));
testMinShortcutCase(
() -> randomFloat(),
(n) -> new FloatPoint("number", n.floatValue()),
(v) -> FloatPoint.decodeDimension(v, 0));
testMinShortcutCase(
() -> randomDouble(),
(n) -> new DoublePoint("number", n.doubleValue()),
(v) -> DoublePoint.decodeDimension(v, 0));
}
private void testMinCase(IndexSearcher searcher,
AggregationBuilder aggregationBuilder,
MappedFieldType ft,
DoubleConsumer testResult) throws IOException {
Collection<Query> queries = Arrays.asList(new MatchAllDocsQuery(), new DocValuesFieldExistsQuery(ft.name()));
for (Query query : queries) {
MinAggregator aggregator = createAggregator(query, aggregationBuilder, searcher, createIndexSettings(), ft);
aggregator.preCollection();
searcher.search(new MatchAllDocsQuery(), aggregator);
aggregator.postCollection();
InternalMin result = (InternalMin) aggregator.buildAggregation(0L);
testResult.accept(result.getValue());
}
}
private void testMinShortcutCase(Supplier<Number> randomNumber,
Function<Number, Field> pointFieldFunc,
Function<byte[], Number> pointConvertFunc) throws IOException {
Directory directory = newDirectory();
IndexWriterConfig config = newIndexWriterConfig().setMergePolicy(NoMergePolicy.INSTANCE);
IndexWriter indexWriter = new IndexWriter(directory, config);
List<Document> documents = new ArrayList<>();
List<Tuple<Integer, Number>> values = new ArrayList<>();
int numValues = atLeast(50);
int docID = 0;
for (int i = 0; i < numValues; i++) {
int numDup = randomIntBetween(1, 3);
for (int j = 0; j < numDup; j++) {
Document document = new Document();
Number nextValue = randomNumber.get();
values.add(new Tuple<>(docID, nextValue));
document.add(new StringField("id", Integer.toString(docID), Field.Store.NO));
document.add(pointFieldFunc.apply(nextValue));
documents.add(document);
docID++;
}
}
// insert some documents without a value for the metric field.
for (int i = 0; i < 3; i++) {
Document document = new Document();
documents.add(document);
}
indexWriter.addDocuments(documents);
Collections.sort(values, Comparator.comparingDouble(t -> t.v2().doubleValue()));
try (IndexReader reader = DirectoryReader.open(indexWriter)) {
LeafReaderContext ctx = reader.leaves().get(0);
Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
assertThat(res, equalTo(values.get(0).v2()));
}
for (int i = 1; i < values.size(); i++) {
indexWriter.deleteDocuments(new Term("id", values.get(i-1).v1().toString()));
try (IndexReader reader = DirectoryReader.open(indexWriter)) {
LeafReaderContext ctx = reader.leaves().get(0);
Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
assertThat(res, equalTo(values.get(i).v2()));
}
}
indexWriter.deleteDocuments(new Term("id", values.get(values.size()-1).v1().toString()));
try (IndexReader reader = DirectoryReader.open(indexWriter)) {
LeafReaderContext ctx = reader.leaves().get(0);
Number res = MinAggregator.findLeafMinValue(ctx.reader(), "number", pointConvertFunc);
assertThat(res, equalTo(null));
}
indexWriter.close();
directory.close();
}
private SearchContext mockSearchContext(Query query) {
SearchContext searchContext = mock(SearchContext.class);
when(searchContext.query()).thenReturn(query);
return searchContext;
}
private Aggregator mockAggregator() {
return mock(Aggregator.class);
}
private ValuesSourceConfig<ValuesSource.Numeric> mockNumericValuesSourceConfig(String fieldName,
NumberFieldMapper.NumberType numType,
boolean indexed) {
ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class);
MappedFieldType ft = new NumberFieldMapper.NumberFieldType(numType);
ft.setName(fieldName);
ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE);
ft.freeze();
when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft));
return config;
}
private ValuesSourceConfig<ValuesSource.Numeric> mockDateValuesSourceConfig(String fieldName, boolean indexed) {
ValuesSourceConfig<ValuesSource.Numeric> config = mock(ValuesSourceConfig.class);
MappedFieldType ft = new DateFieldMapper.Builder(fieldName).fieldType();
ft.setName(fieldName);
ft.setIndexOptions(indexed ? IndexOptions.DOCS : IndexOptions.NONE);
ft.freeze();
when(config.fieldContext()).thenReturn(new FieldContext(fieldName, null, ft));
return config;
}
} }

View File

@ -40,6 +40,7 @@ import java.util.Map;
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.aggregations.AggregationBuilders.count;
import static org.elasticsearch.search.aggregations.AggregationBuilders.filter; import static org.elasticsearch.search.aggregations.AggregationBuilders.filter;
import static org.elasticsearch.search.aggregations.AggregationBuilders.global; import static org.elasticsearch.search.aggregations.AggregationBuilders.global;
import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram;
@ -404,4 +405,22 @@ public class MinIT extends AbstractNumericTestCase {
assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache() assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
.getMissCount(), equalTo(1L)); .getMissCount(), equalTo(1L));
} }
public void testEarlyTermination() throws Exception {
SearchResponse searchResponse = client().prepareSearch("idx")
.setTrackTotalHits(false)
.setQuery(matchAllQuery())
.addAggregation(min("min").field("values"))
.addAggregation(count("count").field("values"))
.execute().actionGet();
Min min = searchResponse.getAggregations().get("min");
assertThat(min, notNullValue());
assertThat(min.getName(), equalTo("min"));
assertThat(min.getValue(), equalTo(2.0));
ValueCount count = searchResponse.getAggregations().get("count");
assertThat(count.getName(), equalTo("count"));
assertThat(count.getValue(), equalTo(20L));
}
} }

View File

@ -54,6 +54,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap()); private static final Script MAP_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "mapScript", Collections.emptyMap());
private static final Script COMBINE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript", private static final Script COMBINE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScript",
Collections.emptyMap()); Collections.emptyMap());
private static final Script REDUCE_SCRIPT = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "reduceScript",
Collections.emptyMap());
private static final Script INIT_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptScore", private static final Script INIT_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptScore",
Collections.emptyMap()); Collections.emptyMap());
@ -61,6 +63,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
Collections.emptyMap()); Collections.emptyMap());
private static final Script COMBINE_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptScore", private static final Script COMBINE_SCRIPT_SCORE = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptScore",
Collections.emptyMap()); Collections.emptyMap());
private static final Script COMBINE_SCRIPT_NOOP = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "combineScriptNoop",
Collections.emptyMap());
     private static final Script INIT_SCRIPT_PARAMS = new Script(ScriptType.INLINE, MockScriptEngine.NAME, "initScriptParams",
             Collections.singletonMap("initialValue", 24));
@@ -96,6 +100,14 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
             Map<String, Object> state = (Map<String, Object>) params.get("state");
             return ((List<Integer>) state.get("collector")).stream().mapToInt(Integer::intValue).sum();
         });
+        SCRIPTS.put("combineScriptNoop", params -> {
+            Map<String, Object> state = (Map<String, Object>) params.get("state");
+            return state;
+        });
+        SCRIPTS.put("reduceScript", params -> {
+            Map<String, Object> state = (Map<String, Object>) params.get("state");
+            return state;
+        });
         SCRIPTS.put("initScriptScore", params -> {
             Map<String, Object> state = (Map<String, Object>) params.get("state");
@@ -160,7 +172,7 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-            aggregationBuilder.mapScript(MAP_SCRIPT); // map script is mandatory, even if its not used in this case
+            aggregationBuilder.mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_NOOP).reduceScript(REDUCE_SCRIPT);
             ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
             assertEquals(AGG_NAME, scriptedMetric.getName());
             assertNotNull(scriptedMetric.aggregation());
@@ -169,9 +181,6 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         }
     }
-    /**
-     * without combine script, the "states" map should contain a list of the size of the number of documents matched
-     */
     public void testScriptedMetricWithoutCombine() throws IOException {
         try (Directory directory = newDirectory()) {
             int numDocs = randomInt(100);
@@ -182,15 +191,28 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
             }
             try (IndexReader indexReader = DirectoryReader.open(directory)) {
                 ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-                aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT);
-                ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
-                assertEquals(AGG_NAME, scriptedMetric.getName());
-                assertNotNull(scriptedMetric.aggregation());
-                @SuppressWarnings("unchecked")
-                Map<String, Object> agg = (Map<String, Object>) scriptedMetric.aggregation();
-                @SuppressWarnings("unchecked")
-                List<Integer> list = (List<Integer>) agg.get("collector");
-                assertEquals(numDocs, list.size());
+                aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).reduceScript(REDUCE_SCRIPT);
+                IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
+                    () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+                assertEquals(exception.getMessage(), "[combineScript] must not be null: [scriptedMetric]");
+            }
+        }
+    }
+
+    public void testScriptedMetricWithoutReduce() throws IOException {
+        try (Directory directory = newDirectory()) {
+            int numDocs = randomInt(100);
+            try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) {
+                for (int i = 0; i < numDocs; i++) {
+                    indexWriter.addDocument(singleton(new SortedNumericDocValuesField("number", i)));
+                }
+            }
+            try (IndexReader indexReader = DirectoryReader.open(directory)) {
+                ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
+                aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
+                IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
+                    () -> search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder));
+                assertEquals(exception.getMessage(), "[reduceScript] must not be null: [scriptedMetric]");
+            }
         }
     }
@@ -208,7 +230,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT);
+            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT)
+                .combineScript(COMBINE_SCRIPT).reduceScript(REDUCE_SCRIPT);
             ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
             assertEquals(AGG_NAME, scriptedMetric.getName());
             assertNotNull(scriptedMetric.aggregation());
@@ -230,7 +253,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-            aggregationBuilder.initScript(INIT_SCRIPT_SCORE).mapScript(MAP_SCRIPT_SCORE).combineScript(COMBINE_SCRIPT_SCORE);
+            aggregationBuilder.initScript(INIT_SCRIPT_SCORE).mapScript(MAP_SCRIPT_SCORE)
+                .combineScript(COMBINE_SCRIPT_SCORE).reduceScript(REDUCE_SCRIPT);
             ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
             assertEquals(AGG_NAME, scriptedMetric.getName());
             assertNotNull(scriptedMetric.aggregation());
@@ -250,7 +274,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-            aggregationBuilder.initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS).combineScript(COMBINE_SCRIPT_PARAMS);
+            aggregationBuilder.initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS)
+                .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
             ScriptedMetric scriptedMetric = search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder);
             // The result value depends on the script params.
@@ -270,8 +295,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
             Map<String, Object> aggParams = Collections.singletonMap(CONFLICTING_PARAM_NAME, "blah");
-            aggregationBuilder.params(aggParams).initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS).
-                combineScript(COMBINE_SCRIPT_PARAMS);
+            aggregationBuilder.params(aggParams).initScript(INIT_SCRIPT_PARAMS).mapScript(MAP_SCRIPT_PARAMS)
+                .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
                 search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
@@ -289,7 +314,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-            aggregationBuilder.initScript(INIT_SCRIPT_SELF_REF).mapScript(MAP_SCRIPT);
+            aggregationBuilder.initScript(INIT_SCRIPT_SELF_REF).mapScript(MAP_SCRIPT)
+                .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
                 search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
@@ -309,7 +335,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT_SELF_REF);
+            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT_SELF_REF)
+                .combineScript(COMBINE_SCRIPT_PARAMS).reduceScript(REDUCE_SCRIPT);
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
                 search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
@@ -326,7 +353,8 @@ public class ScriptedMetricAggregatorTests extends AggregatorTestCase {
         }
         try (IndexReader indexReader = DirectoryReader.open(directory)) {
             ScriptedMetricAggregationBuilder aggregationBuilder = new ScriptedMetricAggregationBuilder(AGG_NAME);
-            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT).combineScript(COMBINE_SCRIPT_SELF_REF);
+            aggregationBuilder.initScript(INIT_SCRIPT).mapScript(MAP_SCRIPT)
+                .combineScript(COMBINE_SCRIPT_SELF_REF).reduceScript(REDUCE_SCRIPT);
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, () ->
                 search(newSearcher(indexReader, true, true), new MatchAllDocsQuery(), aggregationBuilder)
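The net effect of the hunks above is that a scripted_metric aggregation must now declare all of map, combine, and reduce phases (init stays optional), and omitting combine or reduce fails fast with an IllegalArgumentException. A minimal caller-side sketch of the new contract, assuming Painless and an index named "my-index"; the script bodies and names here are illustrative assumptions, not code from this commit:

import java.util.Collections;

import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.aggregations.AggregationBuilders;

public class ScriptedMetricExample {
    // Counts documents: "map" increments per-shard state, "combine" collapses each
    // shard's state to a long, "reduce" folds the per-shard longs into one total.
    public static SearchResponse docCount(Client client) {
        Script initScript = new Script(ScriptType.INLINE, "painless", "state.docs = 0L", Collections.emptyMap());
        Script mapScript = new Script(ScriptType.INLINE, "painless", "state.docs += 1", Collections.emptyMap());
        Script combineScript = new Script(ScriptType.INLINE, "painless", "return state.docs", Collections.emptyMap());
        Script reduceScript = new Script(ScriptType.INLINE, "painless",
            "long total = 0; for (def s : states) { total += s } return total", Collections.emptyMap());
        return client.prepareSearch("my-index")
            .setSize(0)
            .addAggregation(AggregationBuilders.scriptedMetric("doc_count")
                .initScript(initScript)          // still optional
                .mapScript(mapScript)            // mandatory, as before
                .combineScript(combineScript)    // now mandatory: omitting it throws IllegalArgumentException
                .reduceScript(reduceScript))     // now mandatory: omitting it throws IllegalArgumentException
            .get();
    }
}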
View File
@@ -153,6 +153,14 @@ public class ScriptedMetricIT extends ESIntegTestCase {
             return newAggregation;
         });
+        scripts.put("no-op aggregation", vars -> {
+            return (Map<String, Object>) vars.get("state");
+        });
+        scripts.put("no-op list aggregation", vars -> {
+            return (List<List<?>>) vars.get("states");
+        });
         // Equivalent to:
         //
         // newaggregation = [];
@@ -188,6 +196,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
             Integer sum = 0;
             List<Map<String, Object>> states = (List<Map<String, Object>>) vars.get("states");
+            if(states == null) {
+                return newAggregation;
+            }
             for (Map<String, Object> state : states) {
                 List<?> list = (List<?>) state.get("list");
                 if (list != null) {
@@ -328,10 +341,14 @@ public class ScriptedMetricIT extends ESIntegTestCase {
     public void testMap() {
         Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap());
+        Script combineScript =
+            new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+        Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+            "no-op list aggregation", Collections.emptyMap());
         SearchResponse response = client().prepareSearch("idx")
             .setQuery(matchAllQuery())
-            .addAggregation(scriptedMetric("scripted").mapScript(mapScript))
+            .addAggregation(scriptedMetric("scripted").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript))
             .get();
         assertSearchResponse(response);
         assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -369,10 +386,18 @@ public class ScriptedMetricIT extends ESIntegTestCase {
         Map<String, Object> aggregationParams = Collections.singletonMap("param2", 1);
         Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state[param1] = param2", scriptParams);
+        Script combineScript =
+            new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+        Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+            "no-op list aggregation", Collections.emptyMap());
         SearchResponse response = client().prepareSearch("idx")
             .setQuery(matchAllQuery())
-            .addAggregation(scriptedMetric("scripted").params(aggregationParams).mapScript(mapScript))
+            .addAggregation(scriptedMetric("scripted")
+                .params(aggregationParams)
+                .mapScript(mapScript)
+                .combineScript(combineScript)
+                .reduceScript(reduceScript))
             .get();
         assertSearchResponse(response);
         assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -423,7 +448,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
                 .initScript(
                     new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap()))
                 .mapScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
-                    "state.list.add(vars.multiplier)", Collections.emptyMap())))
+                    "state.list.add(vars.multiplier)", Collections.emptyMap()))
+                .combineScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+                    "no-op aggregation", Collections.emptyMap()))
+                .reduceScript(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+                    "no-op list aggregation", Collections.emptyMap())))
             .get();
         assertSearchResponse(response);
         assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -466,6 +495,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
         Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", Collections.emptyMap());
         Script combineScript =
             new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
+        Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+            "no-op list aggregation", Collections.emptyMap());
         SearchResponse response = client()
             .prepareSearch("idx")
@@ -474,7 +505,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
                 scriptedMetric("scripted")
                     .params(params)
                     .mapScript(mapScript)
-                    .combineScript(combineScript))
+                    .combineScript(combineScript)
+                    .reduceScript(reduceScript))
             .execute().actionGet();
         assertSearchResponse(response);
         assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -519,6 +551,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
             Collections.emptyMap());
         Script combineScript =
             new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum state values as a new aggregation", Collections.emptyMap());
+        Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+            "no-op list aggregation", Collections.emptyMap());
         SearchResponse response = client()
             .prepareSearch("idx")
@@ -528,7 +562,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
                     .params(params)
                     .initScript(initScript)
                     .mapScript(mapScript)
-                    .combineScript(combineScript))
+                    .combineScript(combineScript)
+                    .reduceScript(reduceScript))
             .get();
         assertSearchResponse(response);
         assertThat(response.getHits().getTotalHits(), equalTo(numDocs));
@@ -713,6 +748,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
         Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "vars.multiplier = 3", Collections.emptyMap());
         Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
             Collections.emptyMap());
+        Script combineScript =
+            new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
         Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
             "sum all states' state.list values as a new aggregation", Collections.emptyMap());
@@ -724,6 +761,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
                     .params(params)
                     .initScript(initScript)
                     .mapScript(mapScript)
+                    .combineScript(combineScript)
                     .reduceScript(reduceScript))
             .get();
         assertSearchResponse(response);
@@ -752,6 +790,8 @@ public class ScriptedMetricIT extends ESIntegTestCase {
         Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(vars.multiplier)",
             Collections.emptyMap());
+        Script combineScript =
+            new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
         Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
             "sum all states' state.list values as a new aggregation", Collections.emptyMap());
@@ -762,6 +802,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
                 scriptedMetric("scripted")
                     .params(params)
                     .mapScript(mapScript)
+                    .combineScript(combineScript)
                     .reduceScript(reduceScript))
             .get();
         assertSearchResponse(response);
@@ -980,6 +1021,11 @@ public class ScriptedMetricIT extends ESIntegTestCase {
      */
     public void testDontCacheScripts() throws Exception {
         Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state['count'] = 1", Collections.emptyMap());
+        Script combineScript =
+            new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+        Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+            "no-op list aggregation", Collections.emptyMap());
         assertAcked(prepareCreate("cache_test_idx").addMapping("type", "d", "type=long")
             .setSettings(Settings.builder().put("requests.cache.enable", true).put("number_of_shards", 1).put("number_of_replicas", 1))
             .get());
@@ -994,7 +1040,7 @@ public class ScriptedMetricIT extends ESIntegTestCase {
         // Test that a request using a script does not get cached
         SearchResponse r = client().prepareSearch("cache_test_idx").setSize(0)
-            .addAggregation(scriptedMetric("foo").mapScript(mapScript)).get();
+            .addAggregation(scriptedMetric("foo").mapScript(mapScript).combineScript(combineScript).reduceScript(reduceScript)).get();
         assertSearchResponse(r);
         assertThat(client().admin().indices().prepareStats("cache_test_idx").setRequestCache(true).get().getTotal().getRequestCache()
@@ -1006,10 +1052,17 @@ public class ScriptedMetricIT extends ESIntegTestCase {
     public void testConflictingAggAndScriptParams() {
         Map<String, Object> params = Collections.singletonMap("param1", "12");
         Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.list.add(1)", params);
+        Script combineScript =
+            new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "no-op aggregation", Collections.emptyMap());
+        Script reduceScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME,
+            "no-op list aggregation", Collections.emptyMap());
         SearchRequestBuilder builder = client().prepareSearch("idx")
             .setQuery(matchAllQuery())
-            .addAggregation(scriptedMetric("scripted").params(params).mapScript(mapScript));
+            .addAggregation(scriptedMetric("scripted")
+                .params(params).mapScript(mapScript)
+                .combineScript(combineScript)
+                .reduceScript(reduceScript));
         SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get);
         assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters"));
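The mock "no-op aggregation" and "no-op list aggregation" scripts registered above simply pass state through the combine and reduce phases so existing tests keep their old results. As a rough sketch (an assumption for illustration, not part of this commit), the Painless equivalents in a real request would be:

import java.util.Collections;

import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;

final class NoopScripts {
    // Pass-through combine: runs once per shard and forwards that shard's state as-is.
    static final Script NOOP_COMBINE =
        new Script(ScriptType.INLINE, "painless", "return state", Collections.emptyMap());
    // Pass-through reduce: runs once on the coordinating node and forwards the list of shard states.
    static final Script NOOP_REDUCE =
        new Script(ScriptType.INLINE, "painless", "return states", Collections.emptyMap());
}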
View File
@@ -826,16 +826,16 @@ public class TopHitsIT extends ESIntegTestCase {
         assertThat(topReviewers.getHits().getAt(2).getId(), equalTo("1"));
         assertThat(extractValue("name", topReviewers.getHits().getAt(2).getSourceAsMap()), equalTo("user c"));
         assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getField().string(), equalTo("comments"));
-        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(0));
+        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getOffset(), equalTo(1));
         assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
-        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getOffset(), equalTo(2));
+        assertThat(topReviewers.getHits().getAt(2).getNestedIdentity().getChild().getOffset(), equalTo(0));
         assertThat(topReviewers.getHits().getAt(3).getId(), equalTo("1"));
         assertThat(extractValue("name", topReviewers.getHits().getAt(3).getSourceAsMap()), equalTo("user c"));
         assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getField().string(), equalTo("comments"));
-        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(1));
+        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getOffset(), equalTo(0));
         assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getField().string(), equalTo("reviewers"));
-        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getOffset(), equalTo(0));
+        assertThat(topReviewers.getHits().getAt(3).getNestedIdentity().getChild().getOffset(), equalTo(2));
         assertThat(topReviewers.getHits().getAt(4).getId(), equalTo("1"));
         assertThat(extractValue("name", topReviewers.getHits().getAt(4).getSourceAsMap()), equalTo("user d"));
View File
@@ -208,9 +208,9 @@ public class InnerHitsIT extends ESIntegTestCase {
         int size = randomIntBetween(0, numDocs);
         BoolQueryBuilder boolQuery = new BoolQueryBuilder();
         boolQuery.should(nestedQuery("field1", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("a").setSize(size)
-            .addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC))));
+            .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC))));
         boolQuery.should(nestedQuery("field2", matchAllQuery(), ScoreMode.Avg).innerHit(new InnerHitBuilder("b")
-            .addSort(new FieldSortBuilder("_doc").order(SortOrder.DESC)).setSize(size)));
+            .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC)).setSize(size)));
         SearchResponse searchResponse = client().prepareSearch("idx")
             .setQuery(boolQuery)
             .setSize(numDocs)
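"_doc" sorts on the internal Lucene doc id, so ascending order returns nested hits in the order their objects were indexed within the parent document. A self-contained sketch of the same pattern outside the test (method and parameter names are illustrative):

import org.apache.lucene.search.join.ScoreMode;

import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.NestedQueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;

final class InnerHitSortDemo {
    // Nested query whose inner hits come back in source order rather than reversed.
    static NestedQueryBuilder sortedInnerHits(String path, String name, int size) {
        return QueryBuilders.nestedQuery(path, QueryBuilders.matchAllQuery(), ScoreMode.Avg)
            .innerHit(new InnerHitBuilder(name)
                .setSize(size)
                .addSort(new FieldSortBuilder("_doc").order(SortOrder.ASC)));
    }
}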
View File
@@ -8,14 +8,17 @@ package org.elasticsearch.xpack.security.authc.support;
 import java.io.IOException;
 import java.nio.file.Files;
 import java.nio.file.Path;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.stream.Collectors;

 import com.unboundid.ldap.sdk.DN;
 import com.unboundid.ldap.sdk.LDAPException;
@@ -51,7 +54,7 @@ public class DnRoleMapper implements UserRoleMapper {
     private final Path file;
     private final boolean useUnmappedGroupsAsRoles;
     private final CopyOnWriteArrayList<Runnable> listeners = new CopyOnWriteArrayList<>();
-    private volatile Map<DN, Set<String>> dnRoles;
+    private volatile Map<String, List<String>> dnRoles;

     public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) {
         this.config = config;
@@ -87,7 +90,7 @@ public class DnRoleMapper implements UserRoleMapper {
      * logging the error and skipping/removing all mappings. This is aligned with how we handle other auto-loaded files
      * in security.
      */
-    public static Map<DN, Set<String>> parseFileLenient(Path path, Logger logger, String realmType, String realmName) {
+    public static Map<String, List<String>> parseFileLenient(Path path, Logger logger, String realmType, String realmName) {
         try {
             return parseFile(path, logger, realmType, realmName, false);
         } catch (Exception e) {
@@ -98,7 +101,7 @@ public class DnRoleMapper implements UserRoleMapper {
         }
     }

-    public static Map<DN, Set<String>> parseFile(Path path, Logger logger, String realmType, String realmName, boolean strict) {
+    public static Map<String, List<String>> parseFile(Path path, Logger logger, String realmType, String realmName, boolean strict) {
         logger.trace("reading realm [{}/{}] role mappings file [{}]...", realmType, realmName, path.toAbsolutePath());
@@ -149,7 +152,10 @@ public class DnRoleMapper implements UserRoleMapper {
             logger.debug("[{}] role mappings found in file [{}] for realm [{}/{}]", dnToRoles.size(), path.toAbsolutePath(), realmType,
                 realmName);
-            return unmodifiableMap(dnToRoles);
+            Map<String, List<String>> normalizedMap = dnToRoles.entrySet().stream().collect(Collectors.toMap(
+                entry -> entry.getKey().toNormalizedString(),
+                entry -> Collections.unmodifiableList(new ArrayList<>(entry.getValue()))));
+            return unmodifiableMap(normalizedMap);
         } catch (IOException | SettingsException e) {
             throw new ElasticsearchException("could not read realm [" + realmType + "/" + realmName + "] role mappings file [" +
                 path.toAbsolutePath() + "]", e);
@@ -176,8 +182,9 @@ public class DnRoleMapper implements UserRoleMapper {
         Set<String> roles = new HashSet<>();
         for (String groupDnString : groupDns) {
             DN groupDn = dn(groupDnString);
-            if (dnRoles.containsKey(groupDn)) {
-                roles.addAll(dnRoles.get(groupDn));
+            String normalizedGroupDn = groupDn.toNormalizedString();
+            if (dnRoles.containsKey(normalizedGroupDn)) {
+                roles.addAll(dnRoles.get(normalizedGroupDn));
             } else if (useUnmappedGroupsAsRoles) {
                 roles.add(relativeName(groupDn));
             }
@@ -187,14 +194,14 @@ public class DnRoleMapper implements UserRoleMapper {
                 groupDns, file.getFileName(), config.type(), config.name());
         }
-        DN userDn = dn(userDnString);
-        Set<String> rolesMappedToUserDn = dnRoles.get(userDn);
+        String normalizedUserDn = dn(userDnString).toNormalizedString();
+        List<String> rolesMappedToUserDn = dnRoles.get(normalizedUserDn);
         if (rolesMappedToUserDn != null) {
             roles.addAll(rolesMappedToUserDn);
         }
         if (logger.isDebugEnabled()) {
             logger.debug("the roles [{}], are mapped from the user [{}] using file [{}] for realm [{}/{}]",
-                (rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, userDnString, file.getFileName(),
+                (rolesMappedToUserDn == null) ? Collections.emptySet() : rolesMappedToUserDn, normalizedUserDn, file.getFileName(),
                 config.type(), config.name());
         }
         return roles;
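The switch from DN-keyed to normalized-string-keyed maps matters because two textually different DN strings can denote the same directory entry, and such spellings previously missed the lookup. A small self-contained sketch using the UnboundID SDK already imported above (the DN values are taken from the test fixture; the class name is illustrative):

import com.unboundid.ldap.sdk.DN;
import com.unboundid.ldap.sdk.LDAPException;

final class DnNormalizationDemo {
    public static void main(String[] args) throws LDAPException {
        DN a = new DN("CN=Avengers, OU=Marvel, O=superheros");
        DN b = new DN("cn=avengers,ou=marvel,o=superheros");
        // toNormalizedString() lower-cases and strips insignificant spaces, so the
        // two spellings collapse to the same map key:
        System.out.println(a.toNormalizedString());                                // cn=avengers,ou=marvel,o=superheros
        System.out.println(a.toNormalizedString().equals(b.toNormalizedString())); // true
    }
}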
View File
@@ -862,6 +862,9 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
                     .startObject()
                         .field("field2", "value2")
                     .endObject()
+                    .startObject()
+                        .array("field2", "value2", "value3")
+                    .endObject()
                 .endArray()
             .endObject())
             .get();
@@ -889,6 +892,9 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
         assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getNestedIdentity().getOffset(), equalTo(0));
         assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(0).getSourceAsString(),
             equalTo("{\"field2\":\"value2\"}"));
+        assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getNestedIdentity().getOffset(), equalTo(1));
+        assertThat(response.getHits().getAt(0).getInnerHits().get("nested_field").getAt(1).getSourceAsString(),
+            equalTo("{\"field2\":[\"value2\",\"value3\"]}"));
     }

     public void testSuggesters() throws Exception {
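The extra nested object indexed above carries an array value, which is why the second inner hit's _source round-trips as a JSON array. A sketch of the builder calls in isolation (field names as in the test, class and method names illustrative):

import java.io.IOException;

import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

final class NestedArraySourceDemo {
    // Two nested objects under "nested_field": one scalar, one array. The array
    // object's inner-hit _source serializes as {"field2":["value2","value3"]}.
    static XContentBuilder doc() throws IOException {
        return XContentFactory.jsonBuilder()
            .startObject()
                .startArray("nested_field")
                    .startObject().field("field2", "value2").endObject()
                    .startObject().array("field2", "value2", "value3").endObject()
                .endArray()
            .endObject();
    }
}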
View File
@@ -200,27 +200,27 @@ public class DnRoleMapperTests extends ESTestCase {
     public void testParseFile() throws Exception {
         Path file = getDataPath("role_mapping.yml");
         Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
+        Map<String, List<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
         assertThat(mappings, notNullValue());
         assertThat(mappings.size(), is(3));

         DN dn = new DN("cn=avengers,ou=marvel,o=superheros");
-        assertThat(mappings, hasKey(dn));
-        Set<String> roles = mappings.get(dn);
+        assertThat(mappings, hasKey(dn.toNormalizedString()));
+        List<String> roles = mappings.get(dn.toNormalizedString());
         assertThat(roles, notNullValue());
         assertThat(roles, hasSize(2));
         assertThat(roles, containsInAnyOrder("security", "avenger"));

         dn = new DN("cn=shield,ou=marvel,o=superheros");
-        assertThat(mappings, hasKey(dn));
-        roles = mappings.get(dn);
+        assertThat(mappings, hasKey(dn.toNormalizedString()));
+        roles = mappings.get(dn.toNormalizedString());
         assertThat(roles, notNullValue());
         assertThat(roles, hasSize(1));
         assertThat(roles, contains("security"));

         dn = new DN("cn=Horatio Hornblower,ou=people,o=sevenSeas");
-        assertThat(mappings, hasKey(dn));
-        roles = mappings.get(dn);
+        assertThat(mappings, hasKey(dn.toNormalizedString()));
+        roles = mappings.get(dn.toNormalizedString());
         assertThat(roles, notNullValue());
         assertThat(roles, hasSize(1));
         assertThat(roles, contains("avenger"));
@@ -230,7 +230,7 @@ public class DnRoleMapperTests extends ESTestCase {
         Path file = createTempDir().resolve("foo.yaml");
         Files.createFile(file);
         Logger logger = CapturingLogger.newCapturingLogger(Level.DEBUG, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
+        Map<String, List<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
         assertThat(mappings, notNullValue());
         assertThat(mappings.isEmpty(), is(true));
         List<String> events = CapturingLogger.output(logger.getName(), Level.DEBUG);
@@ -242,7 +242,7 @@ public class DnRoleMapperTests extends ESTestCase {
     public void testParseFile_WhenFileDoesNotExist() throws Exception {
         Path file = createTempDir().resolve(randomAlphaOfLength(10));
         Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
+        Map<String, List<String>> mappings = DnRoleMapper.parseFile(file, logger, "_type", "_name", false);
         assertThat(mappings, notNullValue());
         assertThat(mappings.isEmpty(), is(true));
@@ -272,7 +272,7 @@ public class DnRoleMapperTests extends ESTestCase {
         // writing in utf_16 should cause a parsing error as we try to read the file in utf_8
         Files.write(file, Collections.singletonList("aldlfkjldjdflkjd"), StandardCharsets.UTF_16);
         Logger logger = CapturingLogger.newCapturingLogger(Level.INFO, null);
-        Map<DN, Set<String>> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name");
+        Map<String, List<String>> mappings = DnRoleMapper.parseFileLenient(file, logger, "_type", "_name");
         assertThat(mappings, notNullValue());
         assertThat(mappings.isEmpty(), is(true));
         List<String> events = CapturingLogger.output(logger.getName(), Level.ERROR);
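For callers outside these tests, the practical consequence of the new return type is that lookups must normalize the DN before using it as a key. A hedged sketch under assumed realm type and name ("ldap"/"ldap1" are placeholders, the DN comes from the fixture above):

import java.nio.file.Path;
import java.util.List;
import java.util.Map;

import org.apache.logging.log4j.Logger;

import com.unboundid.ldap.sdk.DN;
import com.unboundid.ldap.sdk.LDAPException;

final class RoleLookupDemo {
    // Keys are normalized DN strings now, so the lookup side must normalize too.
    static List<String> rolesFor(Path mappingFile, Logger logger, String dnString) throws LDAPException {
        Map<String, List<String>> mappings = DnRoleMapper.parseFile(mappingFile, logger, "ldap", "ldap1", false);
        return mappings.get(new DN(dnString).toNormalizedString());
    }
}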