Merge branch 'master' into index-lifecycle

Tal Levy 2018-05-24 17:20:53 -07:00
commit b0ab71e2fe
83 changed files with 2015 additions and 692 deletions


@@ -25,6 +25,8 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineResponse;
import java.io.IOException;
@@ -87,4 +89,26 @@ public final class ClusterClient {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
            listener, emptySet(), headers);
    }

    /**
     * Add a pipeline or update an existing pipeline in the cluster.
     * <p>
     * See
     * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-pipeline-api.html">Put Pipeline API on elastic.co</a>
     */
    public PutPipelineResponse putPipeline(PutPipelineRequest request, Header... headers) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::putPipeline,
            PutPipelineResponse::fromXContent, emptySet(), headers);
    }

    /**
     * Asynchronously add a pipeline or update an existing pipeline in the cluster.
     * <p>
     * See
     * <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/put-pipeline-api.html">Put Pipeline API on elastic.co</a>
     */
    public void putPipelineAsync(PutPipelineRequest request, ActionListener<PutPipelineResponse> listener, Header... headers) {
        restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::putPipeline,
            PutPipelineResponse::fromXContent, listener, emptySet(), headers);
    }
}


@@ -58,6 +58,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;
@@ -609,6 +610,21 @@ final class RequestConverters {
        return request;
    }

    static Request putPipeline(PutPipelineRequest putPipelineRequest) throws IOException {
        String endpoint = new EndpointBuilder()
            .addPathPartAsIs("_ingest/pipeline")
            .addPathPart(putPipelineRequest.getId())
            .build();
        Request request = new Request(HttpPut.METHOD_NAME, endpoint);

        Params parameters = new Params(request);
        parameters.withTimeout(putPipelineRequest.timeout());
        parameters.withMasterTimeout(putPipelineRequest.masterNodeTimeout());

        request.setEntity(createEntity(putPipelineRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request listTasks(ListTasksRequest listTaskRequest) {
        if (listTaskRequest.getTaskId() != null && listTaskRequest.getTaskId().isSet()) {
            throw new IllegalArgumentException("TaskId cannot be used for list tasks request");


@@ -25,12 +25,17 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineResponse;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.ingest.Pipeline;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.TaskInfo;
@@ -136,4 +141,41 @@ public class ClusterClientIT extends ESRestHighLevelClientTestCase {
        }
        assertTrue("List tasks were not found", listTasksFound);
    }

    public void testPutPipeline() throws IOException {
        String id = "some_pipeline_id";
        XContentType xContentType = randomFrom(XContentType.values());
        XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent());
        pipelineBuilder.startObject();
        {
            pipelineBuilder.field(Pipeline.DESCRIPTION_KEY, "some random set of processors");
            pipelineBuilder.startArray(Pipeline.PROCESSORS_KEY);
            {
                pipelineBuilder.startObject().startObject("set");
                {
                    pipelineBuilder
                        .field("field", "foo")
                        .field("value", "bar");
                }
                pipelineBuilder.endObject().endObject();
                pipelineBuilder.startObject().startObject("convert");
                {
                    pipelineBuilder
                        .field("field", "rank")
                        .field("type", "integer");
                }
                pipelineBuilder.endObject().endObject();
            }
            pipelineBuilder.endArray();
        }
        pipelineBuilder.endObject();
        PutPipelineRequest request = new PutPipelineRequest(
            id,
            BytesReference.bytes(pipelineBuilder),
            pipelineBuilder.contentType());

        PutPipelineResponse putPipelineResponse =
            execute(request, highLevelClient().cluster()::putPipeline, highLevelClient().cluster()::putPipelineAsync);
        assertTrue(putPipelineResponse.isAcknowledged());
    }
}


@@ -61,6 +61,7 @@ import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.MultiSearchRequest;
import org.elasticsearch.action.search.SearchRequest;
@@ -91,6 +92,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.RandomCreateIndexGenerator;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.query.TermQueryBuilder;
@@ -119,6 +121,7 @@ import org.elasticsearch.test.RandomObjects;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
@@ -1402,6 +1405,26 @@ public class RequestConvertersTests extends ESTestCase {
        assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testPutPipeline() throws IOException {
        String pipelineId = "some_pipeline_id";
        PutPipelineRequest request = new PutPipelineRequest(
            pipelineId,
            new BytesArray("{}".getBytes(StandardCharsets.UTF_8)),
            XContentType.JSON
        );
        Map<String, String> expectedParams = new HashMap<>();
        setRandomMasterTimeout(request, expectedParams);
        setRandomTimeout(request::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);

        Request expectedRequest = RequestConverters.putPipeline(request);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        endpoint.add("_ingest/pipeline");
        endpoint.add(pipelineId);
        assertEquals(endpoint.toString(), expectedRequest.getEndpoint());
        assertEquals(HttpPut.METHOD_NAME, expectedRequest.getMethod());
        assertEquals(expectedParams, expectedRequest.getParameters());
    }

    public void testRollover() throws IOException {
        RolloverRequest rolloverRequest = new RolloverRequest(randomAlphaOfLengthBetween(3, 10),
            randomBoolean() ? null : randomAlphaOfLengthBetween(3, 10));


@@ -21,7 +21,6 @@ package org.elasticsearch.client.documentation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
@@ -29,9 +28,12 @@ import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.ingest.PutPipelineRequest;
import org.elasticsearch.action.ingest.PutPipelineResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.TimeValue;
@@ -41,6 +43,7 @@ import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -272,4 +275,80 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }

    public void testPutPipeline() throws IOException {
        RestHighLevelClient client = highLevelClient();

        {
            // tag::put-pipeline-request
            String source =
                "{\"description\":\"my set of processors\"," +
                    "\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}";
            PutPipelineRequest request = new PutPipelineRequest(
                "my-pipeline-id", // <1>
                new BytesArray(source.getBytes(StandardCharsets.UTF_8)), // <2>
                XContentType.JSON // <3>
            );
            // end::put-pipeline-request

            // tag::put-pipeline-request-timeout
            request.timeout(TimeValue.timeValueMinutes(2)); // <1>
            request.timeout("2m"); // <2>
            // end::put-pipeline-request-timeout

            // tag::put-pipeline-request-masterTimeout
            request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1>
            request.masterNodeTimeout("1m"); // <2>
            // end::put-pipeline-request-masterTimeout

            // tag::put-pipeline-execute
            PutPipelineResponse response = client.cluster().putPipeline(request); // <1>
            // end::put-pipeline-execute

            // tag::put-pipeline-response
            boolean acknowledged = response.isAcknowledged(); // <1>
            // end::put-pipeline-response
            assertTrue(acknowledged);
        }
    }

    public void testPutPipelineAsync() throws Exception {
        RestHighLevelClient client = highLevelClient();

        {
            String source =
                "{\"description\":\"my set of processors\"," +
                    "\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}";
            PutPipelineRequest request = new PutPipelineRequest(
                "my-pipeline-id",
                new BytesArray(source.getBytes(StandardCharsets.UTF_8)),
                XContentType.JSON
            );

            // tag::put-pipeline-execute-listener
            ActionListener<PutPipelineResponse> listener =
                new ActionListener<PutPipelineResponse>() {
                    @Override
                    public void onResponse(PutPipelineResponse response) {
                        // <1>
                    }

                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::put-pipeline-execute-listener

            // Replace the empty listener by a blocking listener in test
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::put-pipeline-execute-async
            client.cluster().putPipelineAsync(request, listener); // <1>
            // end::put-pipeline-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }
    }
}


@@ -0,0 +1,83 @@
[[java-rest-high-cluster-put-pipeline]]
=== Put Pipeline API

[[java-rest-high-cluster-put-pipeline-request]]
==== Put Pipeline Request

A `PutPipelineRequest` requires an `id` argument, a source and an `XContentType`. The source consists
of a description and a list of `Processor` objects.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-pipeline-request]
--------------------------------------------------
<1> The pipeline id
<2> The source for the pipeline as a `BytesArray`
<3> The `XContentType` for the pipeline source supplied above

==== Optional arguments
The following arguments can optionally be provided:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-pipeline-request-timeout]
--------------------------------------------------
<1> Timeout to wait for all the nodes to acknowledge the pipeline creation as a `TimeValue`
<2> Timeout to wait for all the nodes to acknowledge the pipeline creation as a `String`

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-pipeline-request-masterTimeout]
--------------------------------------------------
<1> Timeout to connect to the master node as a `TimeValue`
<2> Timeout to connect to the master node as a `String`

[[java-rest-high-cluster-put-pipeline-sync]]
==== Synchronous Execution

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-pipeline-execute]
--------------------------------------------------
<1> Execute the request and get back the response in a `PutPipelineResponse` object

[[java-rest-high-cluster-put-pipeline-async]]
==== Asynchronous Execution

The asynchronous execution of a put pipeline request requires both the `PutPipelineRequest`
instance and an `ActionListener` instance to be passed to the asynchronous
method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-pipeline-execute-async]
--------------------------------------------------
<1> The `PutPipelineRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `PutPipelineResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-pipeline-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument

[[java-rest-high-cluster-put-pipeline-response]]
==== Put Pipeline Response

The returned `PutPipelineResponse` allows you to retrieve information about the executed
operation as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[put-pipeline-response]
--------------------------------------------------
<1> Indicates whether all of the nodes have acknowledged the request


@@ -105,9 +105,11 @@ The Java High Level REST Client supports the following Cluster APIs:
* <<java-rest-high-cluster-put-settings>>
* <<java-rest-high-cluster-list-tasks>>
* <<java-rest-high-cluster-put-pipeline>>

include::cluster/put_settings.asciidoc[]
include::cluster/list_tasks.asciidoc[]
include::cluster/put_pipeline.asciidoc[]

== Snapshot APIs


@@ -37,10 +37,12 @@ Response:
"ip_ranges": {
    "buckets" : [
        {
            "key": "*-10.0.0.5",
            "to": "10.0.0.5",
            "doc_count": 10
        },
        {
            "key": "10.0.0.5-*",
            "from": "10.0.0.5",
            "doc_count": 260
        }


@@ -12,6 +12,9 @@
* Purely negative queries (only MUST_NOT clauses) now return a score of `0`
rather than `1`.

* The boundary specified using geohashes in the `geo_bounding_box` query
now includes the entire geohash cell, instead of just the geohash center.

==== Adaptive replica selection enabled by default

Adaptive replica selection has been enabled by default. If you wish to return to


@@ -231,6 +231,38 @@ GET /_search
--------------------------------------------------
// CONSOLE

When geohashes are used to specify the edges of the bounding box, the
geohashes are treated as rectangles. The bounding box is defined in such
a way that its top left corresponds to the top left corner of the geohash
specified in the `top_left` parameter and its bottom right is defined as
the bottom right of the geohash specified in the `bottom_right` parameter.

In order to specify a bounding box that would match the entire area of a
geohash, the geohash can be specified in both the `top_left` and
`bottom_right` parameters:

[source,js]
--------------------------------------------------
GET /_search
{
    "query": {
        "geo_bounding_box" : {
            "pin.location" : {
                "top_left" : "dr",
                "bottom_right" : "dr"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE

In this example, the geohash `dr` will produce the bounding box
query with the top left corner at `45.0,-78.75` and the bottom right
corner at `39.375,-67.5`.

[float]
==== Vertices


@@ -109,6 +109,12 @@ GET _search
--------------------------------------------------
// CONSOLE

Note that if the date is missing some of the year, month or day parts, the
missing parts are filled in with the start of
https://en.wikipedia.org/wiki/Unix_time[unix time], which is January 1st, 1970.
This means that, when e.g. specifying `dd` as the format, a value like `"gte" : 10`
will translate to `1970-01-10T00:00:00.000Z`.
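
For example, a range query along the following lines (the field name `my_date` is purely
illustrative) would match all documents whose date is `1970-01-10T00:00:00.000Z` or later:

[source,js]
--------------------------------------------------
GET _search
{
    "query": {
        "range" : {
            "my_date" : {
                "gte" : 10,
                "format" : "dd"
            }
        }
    }
}
--------------------------------------------------
// CONSOLE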
===== Time zone in range queries

Dates can be converted from another timezone to UTC either by specifying the


@@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.concurrent;

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

/**
 * A thread-safe completable context that allows listeners to be attached. This class relies on the
 * {@link CompletableFuture} for the concurrency logic. However, it does not accept {@link Throwable} as
 * an exceptional result. This allows attaching listeners that only handle {@link Exception}.
 *
 * @param <T> the result type
 */
public class CompletableContext<T> {

    private final CompletableFuture<T> completableFuture = new CompletableFuture<>();

    public void addListener(BiConsumer<T, ? super Exception> listener) {
        BiConsumer<T, Throwable> castThrowable = (v, t) -> {
            if (t == null) {
                listener.accept(v, null);
            } else {
                assert !(t instanceof Error) : "Cannot be error";
                listener.accept(v, (Exception) t);
            }
        };
        completableFuture.whenComplete(castThrowable);
    }

    public boolean isDone() {
        return completableFuture.isDone();
    }

    public boolean isCompletedExceptionally() {
        return completableFuture.isCompletedExceptionally();
    }

    public boolean completeExceptionally(Exception ex) {
        return completableFuture.completeExceptionally(ex);
    }

    public boolean complete(T value) {
        return completableFuture.complete(value);
    }
}
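
A minimal usage sketch of this class (the listener bodies are placeholders and the
variable name is arbitrary, not part of the change above):

```java
CompletableContext<Void> closeContext = new CompletableContext<>();

// Listeners always receive an Exception (never a bare Throwable);
// an Error trips the assertion inside addListener instead of being passed on.
closeContext.addListener((aVoid, e) -> {
    if (e == null) {
        // handle normal completion
    } else {
        // handle exceptional completion
    }
});

closeContext.complete(null); // invokes the listener with (null, null)
```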


@@ -33,6 +33,8 @@ publishing {
}

dependencies {
  compile "org.elasticsearch:elasticsearch-core:${version}"

  testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"
  testCompile "junit:junit:${versions.junit}"
  testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}"


@@ -28,7 +28,7 @@ public abstract class BytesWriteHandler implements ReadWriteHandler {
    private static final List<FlushOperation> EMPTY_LIST = Collections.emptyList();

-    public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Throwable> listener) {
+    public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Exception> listener) {
        assert message instanceof ByteBuffer[] : "This channel only supports messages that are of type: " + ByteBuffer[].class
            + ". Found type: " + message.getClass() + ".";
        return new FlushReadyWrite(context, (ByteBuffer[]) message, listener);


@@ -19,11 +19,12 @@
package org.elasticsearch.nio;

+import org.elasticsearch.common.concurrent.CompletableContext;

import java.io.IOException;
import java.nio.channels.NetworkChannel;
import java.nio.channels.SelectableChannel;
import java.nio.channels.SelectionKey;
-import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -37,7 +38,7 @@ public abstract class ChannelContext<S extends SelectableChannel & NetworkChanne
    protected final S rawChannel;
    private final Consumer<Exception> exceptionHandler;
-    private final CompletableFuture<Void> closeContext = new CompletableFuture<>();
+    private final CompletableContext<Void> closeContext = new CompletableContext<>();
    private volatile SelectionKey selectionKey;

    ChannelContext(S rawChannel, Consumer<Exception> exceptionHandler) {
@@ -81,8 +82,8 @@ public abstract class ChannelContext<S extends SelectableChannel & NetworkChanne
     *
     * @param listener to be called
     */
-    public void addCloseListener(BiConsumer<Void, Throwable> listener) {
-        closeContext.whenComplete(listener);
+    public void addCloseListener(BiConsumer<Void, Exception> listener) {
+        closeContext.addListener(listener);
    }

    public boolean isOpen() {


@@ -25,13 +25,13 @@ import java.util.function.BiConsumer;

public class FlushOperation {

-    private final BiConsumer<Void, Throwable> listener;
+    private final BiConsumer<Void, Exception> listener;
    private final ByteBuffer[] buffers;
    private final int[] offsets;
    private final int length;
    private int internalIndex;

-    public FlushOperation(ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
+    public FlushOperation(ByteBuffer[] buffers, BiConsumer<Void, Exception> listener) {
        this.listener = listener;
        this.buffers = buffers;
        this.offsets = new int[buffers.length];
@@ -44,7 +44,7 @@ public class FlushOperation {
        length = offset;
    }

-    public BiConsumer<Void, Throwable> getListener() {
+    public BiConsumer<Void, Exception> getListener() {
        return listener;
    }


@@ -27,7 +27,7 @@ public class FlushReadyWrite extends FlushOperation implements WriteOperation {
    private final SocketChannelContext channelContext;
    private final ByteBuffer[] buffers;

-    FlushReadyWrite(SocketChannelContext channelContext, ByteBuffer[] buffers, BiConsumer<Void, Throwable> listener) {
+    FlushReadyWrite(SocketChannelContext channelContext, ByteBuffer[] buffers, BiConsumer<Void, Exception> listener) {
        super(buffers, listener);
        this.channelContext = channelContext;
        this.buffers = buffers;


@@ -53,7 +53,7 @@ public abstract class NioChannel {
     *
     * @param listener to be called at close
     */
-    public void addCloseListener(BiConsumer<Void, Throwable> listener) {
+    public void addCloseListener(BiConsumer<Void, Exception> listener) {
        getContext().addCloseListener(listener);
    }


@@ -60,7 +60,7 @@ public class NioSocketChannel extends NioChannel {
        return remoteAddress;
    }

-    public void addConnectListener(BiConsumer<Void, Throwable> listener) {
+    public void addConnectListener(BiConsumer<Void, Exception> listener) {
        context.addConnectListener(listener);
    }


@@ -38,7 +38,7 @@ public interface ReadWriteHandler {
     * @param listener the listener to be called when the message is sent
     * @return the write operation to be queued
     */
-    WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Throwable> listener);
+    WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Exception> listener);

    /**
     * This method is called on the event loop thread. It should serialize a write operation object to bytes


@@ -19,6 +19,7 @@
package org.elasticsearch.nio;

+import org.elasticsearch.common.concurrent.CompletableContext;
import org.elasticsearch.nio.utils.ExceptionsHelper;

import java.io.IOException;
@@ -27,7 +28,6 @@ import java.nio.channels.ClosedChannelException;
import java.nio.channels.SocketChannel;
import java.util.ArrayList;
import java.util.LinkedList;
-import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
@@ -48,7 +48,7 @@ public abstract class SocketChannelContext extends ChannelContext<SocketChannel>
    protected final AtomicBoolean isClosing = new AtomicBoolean(false);
    private final ReadWriteHandler readWriteHandler;
    private final SocketSelector selector;
-    private final CompletableFuture<Void> connectContext = new CompletableFuture<>();
+    private final CompletableContext<Void> connectContext = new CompletableContext<>();
    private final LinkedList<FlushOperation> pendingFlushes = new LinkedList<>();
    private boolean ioException;
    private boolean peerClosed;
@@ -73,8 +73,8 @@ public abstract class SocketChannelContext extends ChannelContext<SocketChannel>
        return channel;
    }

-    public void addConnectListener(BiConsumer<Void, Throwable> listener) {
-        connectContext.whenComplete(listener);
+    public void addConnectListener(BiConsumer<Void, Exception> listener) {
+        connectContext.addListener(listener);
    }

    public boolean isConnectComplete() {
@@ -121,7 +121,7 @@ public abstract class SocketChannelContext extends ChannelContext<SocketChannel>
        return isConnected;
    }

-    public void sendMessage(Object message, BiConsumer<Void, Throwable> listener) {
+    public void sendMessage(Object message, BiConsumer<Void, Exception> listener) {
        if (isClosing.get()) {
            listener.accept(null, new ClosedChannelException());
            return;


@@ -138,7 +138,7 @@ public class SocketSelector extends ESSelector {
     * @param listener to be executed
     * @param value to provide to listener
     */
-    public <V> void executeListener(BiConsumer<V, Throwable> listener, V value) {
+    public <V> void executeListener(BiConsumer<V, Exception> listener, V value) {
        assertOnSelectorThread();
        try {
            listener.accept(value, null);
@@ -154,7 +154,7 @@ public class SocketSelector extends ESSelector {
     * @param listener to be executed
     * @param exception to provide to listener
     */
-    public <V> void executeFailedListener(BiConsumer<V, Throwable> listener, Exception exception) {
+    public <V> void executeFailedListener(BiConsumer<V, Exception> listener, Exception exception) {
        assertOnSelectorThread();
        try {
            listener.accept(null, exception);


@@ -27,7 +27,7 @@ import java.util.function.BiConsumer;
 */
public interface WriteOperation {

-    BiConsumer<Void, Throwable> getListener();
+    BiConsumer<Void, Exception> getListener();

    SocketChannelContext getChannel();


@@ -45,7 +45,7 @@ public class BytesChannelContextTests extends ESTestCase {
    private BytesChannelContext context;
    private InboundChannelBuffer channelBuffer;
    private SocketSelector selector;
-    private BiConsumer<Void, Throwable> listener;
+    private BiConsumer<Void, Exception> listener;
    private int messageLength;

    @Before
@@ -191,7 +191,7 @@ public class BytesChannelContextTests extends ESTestCase {
    public void testMultipleWritesPartialFlushes() throws IOException {
        assertFalse(context.readyForFlush());

-        BiConsumer<Void, Throwable> listener2 = mock(BiConsumer.class);
+        BiConsumer<Void, Exception> listener2 = mock(BiConsumer.class);
        FlushReadyWrite flushOperation1 = mock(FlushReadyWrite.class);
        FlushReadyWrite flushOperation2 = mock(FlushReadyWrite.class);
        when(flushOperation1.getBuffersToWrite()).thenReturn(new ByteBuffer[0]);


@@ -83,7 +83,7 @@ public class ChannelContextTests extends ESTestCase {
            if (t == null) {
                throw new AssertionError("Close should not fail");
            } else {
-                exception.set((Exception) t);
+                exception.set(t);
            }
        });


@@ -31,7 +31,7 @@ import static org.mockito.Mockito.mock;

public class FlushOperationTests extends ESTestCase {

-    private BiConsumer<Void, Throwable> listener;
+    private BiConsumer<Void, Exception> listener;

    @Before
    @SuppressWarnings("unchecked")


@@ -50,7 +50,7 @@ public class SocketChannelContextTests extends ESTestCase {
    private TestSocketChannelContext context;
    private Consumer<Exception> exceptionHandler;
    private NioSocketChannel channel;
-    private BiConsumer<Void, Throwable> listener;
+    private BiConsumer<Void, Exception> listener;
    private SocketSelector selector;
    private ReadWriteHandler readWriteHandler;
@@ -125,7 +125,7 @@ public class SocketChannelContextTests extends ESTestCase {
            if (t == null) {
                throw new AssertionError("Connection should not succeed");
            } else {
-                exception.set((Exception) t);
+                exception.set(t);
            }
        });
@@ -206,7 +206,7 @@ public class SocketChannelContextTests extends ESTestCase {
        ByteBuffer[] buffer = {ByteBuffer.allocate(10)};
        WriteOperation writeOperation = mock(WriteOperation.class);
-        BiConsumer<Void, Throwable> listener2 = mock(BiConsumer.class);
+        BiConsumer<Void, Exception> listener2 = mock(BiConsumer.class);
        when(readWriteHandler.writeToBytes(writeOperation)).thenReturn(Arrays.asList(new FlushOperation(buffer, listener),
            new FlushOperation(buffer, listener2)));
        context.queueWriteOperation(writeOperation);
@@ -232,7 +232,7 @@ public class SocketChannelContextTests extends ESTestCase {
        ByteBuffer[] buffer = {ByteBuffer.allocate(10)};
-        BiConsumer<Void, Throwable> listener2 = mock(BiConsumer.class);
+        BiConsumer<Void, Exception> listener2 = mock(BiConsumer.class);

        assertFalse(context.readyForFlush());
        when(channel.isOpen()).thenReturn(true);


@@ -50,7 +50,7 @@ public class SocketSelectorTests extends ESTestCase {
    private NioSocketChannel channel;
    private TestSelectionKey selectionKey;
    private SocketChannelContext channelContext;
-    private BiConsumer<Void, Throwable> listener;
+    private BiConsumer<Void, Exception> listener;
    private ByteBuffer[] buffers = {ByteBuffer.allocate(1)};
    private Selector rawSelector;


@@ -20,26 +20,21 @@
package org.elasticsearch.transport.netty4;

import io.netty.channel.Channel;
-import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPromise;
-import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.apache.logging.log4j.util.Supplier;
-import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.concurrent.CompletableContext;
import org.elasticsearch.transport.TcpChannel;
import org.elasticsearch.transport.TransportException;

import java.net.InetSocketAddress;
-import java.nio.channels.ClosedSelectorException;
-import java.util.concurrent.CompletableFuture;

public class NettyTcpChannel implements TcpChannel {

    private final Channel channel;
    private final String profile;
-    private final CompletableFuture<Void> closeContext = new CompletableFuture<>();
+    private final CompletableContext<Void> closeContext = new CompletableContext<>();

    NettyTcpChannel(Channel channel, String profile) {
        this.channel = channel;
@@ -51,9 +46,9 @@ public class NettyTcpChannel implements TcpChannel {
                Throwable cause = f.cause();
                if (cause instanceof Error) {
                    Netty4Utils.maybeDie(cause);
-                    closeContext.completeExceptionally(cause);
+                    closeContext.completeExceptionally(new Exception(cause));
                } else {
-                    closeContext.completeExceptionally(cause);
+                    closeContext.completeExceptionally((Exception) cause);
                }
            }
        });
@@ -71,7 +66,7 @@ public class NettyTcpChannel implements TcpChannel {

    @Override
    public void addCloseListener(ActionListener<Void> listener) {
-        closeContext.whenComplete(ActionListener.toBiConsumer(listener));
+        closeContext.addListener(ActionListener.toBiConsumer(listener));
    }

    @Override


@@ -96,7 +96,7 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
    }

    @Override
-    public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Throwable> listener) {
+    public WriteOperation createWriteOperation(SocketChannelContext context, Object message, BiConsumer<Void, Exception> listener) {
        assert message instanceof NioHttpResponse : "This channel only supports messages that are of type: "
            + NioHttpResponse.class + ". Found type: " + message.getClass() + ".";
        return new HttpWriteOperation(context, (NioHttpResponse) message, listener);


@@ -28,16 +28,16 @@ public class HttpWriteOperation implements WriteOperation {

    private final SocketChannelContext channelContext;
    private final NioHttpResponse response;
-    private final BiConsumer<Void, Throwable> listener;
+    private final BiConsumer<Void, Exception> listener;

-    HttpWriteOperation(SocketChannelContext channelContext, NioHttpResponse response, BiConsumer<Void, Throwable> listener) {
+    HttpWriteOperation(SocketChannelContext channelContext, NioHttpResponse response, BiConsumer<Void, Exception> listener) {
        this.channelContext = channelContext;
        this.response = response;
        this.listener = listener;
    }

    @Override
-    public BiConsumer<Void, Throwable> getListener() {
+    public BiConsumer<Void, Exception> getListener() {
        return listener;
    }


@@ -36,7 +36,7 @@ import java.util.function.BiConsumer;
 * complete that promise when accept is called. It delegates the normal promise methods to the underlying
 * promise.
 */
-public class NettyListener implements BiConsumer<Void, Throwable>, ChannelPromise {
+public class NettyListener implements BiConsumer<Void, Exception>, ChannelPromise {

    private final ChannelPromise promise;
@@ -45,11 +45,11 @@ public class NettyListener implements BiConsumer<Void, Throwable>, ChannelPromis
    }

    @Override
-    public void accept(Void v, Throwable throwable) {
-        if (throwable == null) {
+    public void accept(Void v, Exception exception) {
+        if (exception == null) {
            promise.setSuccess();
        } else {
-            promise.setFailure(throwable);
+            promise.setFailure(exception);
        }
    }
@@ -212,17 +212,22 @@ public class NettyListener implements BiConsumer<Void, Throwable>, ChannelPromis
        return promise.unvoid();
    }

-    public static NettyListener fromBiConsumer(BiConsumer<Void, Throwable> biConsumer, Channel channel) {
+    public static NettyListener fromBiConsumer(BiConsumer<Void, Exception> biConsumer, Channel channel) {
        if (biConsumer instanceof NettyListener) {
            return (NettyListener) biConsumer;
        } else {
            ChannelPromise channelPromise = channel.newPromise();
            channelPromise.addListener(f -> {
-                if (f.cause() == null) {
+                Throwable cause = f.cause();
+                if (cause == null) {
                    biConsumer.accept(null, null);
                } else {
-                    ExceptionsHelper.dieOnError(f.cause());
-                    biConsumer.accept(null, f.cause());
+                    if (cause instanceof Error) {
+                        ExceptionsHelper.dieOnError(cause);
+                        biConsumer.accept(null, new Exception(cause));
+                    } else {
+                        biConsumer.accept(null, (Exception) cause);
+                    }
                }
            });


@@ -120,7 +120,7 @@ public class NioHttpChannel extends AbstractRestChannel {
                toClose.add(nioChannel::close);
            }

-            BiConsumer<Void, Throwable> listener = (aVoid, throwable) -> Releasables.close(toClose);
+            BiConsumer<Void, Exception> listener = (aVoid, ex) -> Releasables.close(toClose);
            nioChannel.getContext().sendMessage(new NioHttpResponse(sequence, resp), listener);
            success = true;
        } finally {


@@ -31,6 +31,3 @@ dependencies {
 * and will be fixed later.
 * Tracked by https://github.com/elastic/elasticsearch/issues/30628
 */
-if ("zip".equals(integTestCluster.distribution)) {
-    integTestRunner.enabled = false
-}


@@ -1,159 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.rankeval;

import org.elasticsearch.index.rankeval.RankEvalSpec.ScriptWithId;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Before;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class SmokeMultipleTemplatesIT extends ESIntegTestCase {

    private static final String MATCH_TEMPLATE = "match_template";

    @Override
    protected Collection<Class<? extends Plugin>> transportClientPlugins() {
        return Arrays.asList(RankEvalPlugin.class);
    }

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        return Arrays.asList(RankEvalPlugin.class);
    }

    @Before
    public void setup() {
        createIndex("test");
        ensureGreen();

        client().prepareIndex("test", "testtype").setId("1")
            .setSource("text", "berlin", "title", "Berlin, Germany").get();
        client().prepareIndex("test", "testtype").setId("2")
            .setSource("text", "amsterdam").get();
        client().prepareIndex("test", "testtype").setId("3")
            .setSource("text", "amsterdam").get();
        client().prepareIndex("test", "testtype").setId("4")
            .setSource("text", "amsterdam").get();
        client().prepareIndex("test", "testtype").setId("5")
            .setSource("text", "amsterdam").get();
        client().prepareIndex("test", "testtype").setId("6")
            .setSource("text", "amsterdam").get();
        refresh();
    }

    public void testPrecisionAtRequest() throws IOException {
        List<RatedRequest> specifications = new ArrayList<>();
        Map<String, Object> ams_params = new HashMap<>();
        ams_params.put("querystring", "amsterdam");
        RatedRequest amsterdamRequest = new RatedRequest(
            "amsterdam_query", createRelevant("2", "3", "4", "5"), ams_params, MATCH_TEMPLATE);
        specifications.add(amsterdamRequest);

        Map<String, Object> berlin_params = new HashMap<>();
        berlin_params.put("querystring", "berlin");
        RatedRequest berlinRequest = new RatedRequest(
            "berlin_query", createRelevant("1"), berlin_params, MATCH_TEMPLATE);
        specifications.add(berlinRequest);

        PrecisionAtK metric = new PrecisionAtK();

        ScriptWithId template =
            new ScriptWithId(
                MATCH_TEMPLATE,
                new Script(
                    ScriptType.INLINE,
                    "mustache", "{\"query\": {\"match\": {\"text\": \"{{querystring}}\"}}}",
                    new HashMap<>()));
        Set<ScriptWithId> templates = new HashSet<>();
        templates.add(template);
        RankEvalSpec task = new RankEvalSpec(specifications, metric, templates);
        RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest());
        builder.setRankEvalSpec(task);

        RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request().indices("test")).actionGet();
        assertEquals(0.9, response.getEvaluationResult(), Double.MIN_VALUE);
    }

    public void testTemplateWithAggsFails() {
        String template = "{ \"aggs\" : { \"avg_grade\" : { \"avg\" : { \"field\" : \"grade\" }}}}";
        assertTemplatedRequestFailures(template, "Query in rated requests should not contain aggregations.");
    }

    public void testTemplateWithSuggestFails() {
        String template = "{\"suggest\" : {\"my-suggestion\" : {\"text\" : \"Elastic\",\"term\" : {\"field\" : \"message\"}}}}";
        assertTemplatedRequestFailures(template, "Query in rated requests should not contain a suggest section.");
    }

    public void testTemplateWithHighlighterFails() {
        String template = "{\"highlight\" : { \"fields\" : {\"content\" : {}}}}";
        assertTemplatedRequestFailures(template, "Query in rated requests should not contain a highlighter section.");
    }

    public void testTemplateWithProfileFails() {
        String template = "{\"profile\" : \"true\" }";
        assertTemplatedRequestFailures(template, "Query in rated requests should not use profile.");
    }

    public void testTemplateWithExplainFails() {
        String template = "{\"explain\" : \"true\" }";
        assertTemplatedRequestFailures(template, "Query in rated requests should not use explain.");
    }

    private static void assertTemplatedRequestFailures(String template, String expectedMessage) {
        List<RatedDocument> ratedDocs = Arrays.asList(new RatedDocument("index1", "id1", 1));
        RatedRequest ratedRequest = new RatedRequest("id", ratedDocs, Collections.singletonMap("param1", "value1"), "templateId");
        Collection<ScriptWithId> templates = Collections.singletonList(new ScriptWithId("templateId",
            new Script(ScriptType.INLINE, Script.DEFAULT_TEMPLATE_LANG, template, Collections.emptyMap())));
        RankEvalSpec rankEvalSpec = new RankEvalSpec(Collections.singletonList(ratedRequest), new PrecisionAtK(), templates);
        RankEvalRequest rankEvalRequest = new RankEvalRequest(rankEvalSpec, new String[] { "test" });
        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
            () -> client().execute(RankEvalAction.INSTANCE, rankEvalRequest).actionGet());
        assertEquals(expectedMessage, e.getMessage());
    }

    private static List<RatedDocument> createRelevant(String... docs) {
        List<RatedDocument> relevant = new ArrayList<>();
        for (String doc : docs) {
            relevant.add(new RatedDocument("test", doc, Rating.RELEVANT.ordinal()));
        }
        return relevant;
    }

    public enum Rating {
        IRRELEVANT, RELEVANT;
    }
}


@@ -0,0 +1,171 @@
setup:
  - do:
      indices.create:
        index: test
        body:
          settings:
            index:
              number_of_shards: 1
  - do:
      index:
        index: test
        type: _doc
        id: 1
        body: { "text": "berlin", "title" : "Berlin, Germany" }
  - do:
      index:
        index: test
        type: _doc
        id: 2
        body: { "text": "amsterdam" }
  - do:
      index:
        index: test
        type: _doc
        id: 3
        body: { "text": "amsterdam" }
  - do:
      index:
        index: test
        type: _doc
        id: 4
        body: { "text": "amsterdam" }
  - do:
      index:
        index: test
        type: _doc
        id: 5
        body: { "text": "amsterdam" }
  - do:
      index:
        index: test
        type: _doc
        id: 6
        body: { "text": "amsterdam" }
  - do:
      indices.refresh: {}

---
"Basic rank-eval request with template":
  - skip:
      version: " - 6.1.99"
      reason: the ranking evaluation feature is available since 6.2

  - do:
      rank_eval:
        body: {
          "templates": [ { "id": "match", "template": {"source": "{\"query\": { \"match\" : {\"text\" : \"{{query_string}}\" }}}" }} ],
          "requests" : [
            {
              "id": "amsterdam_query",
              "params": { "query_string": "amsterdam" },
              "template_id": "match",
              "ratings": [
                {"_index": "test", "_id": "2", "rating": 1},
                {"_index": "test", "_id": "3", "rating": 1},
                {"_index": "test", "_id": "4", "rating": 1},
                {"_index": "test", "_id": "5", "rating": 1}]
            },
            {
              "id" : "berlin_query",
              "params": { "query_string": "berlin" },
              "template_id": "match",
              "ratings": [{"_index": "test", "_id": "1", "rating": 1}]
            }
          ],
          "metric" : { "precision": { }}
        }

  - match: {quality_level: 0.9}
  - match: {details.amsterdam_query.unknown_docs.0._id: "6"}

---
"Test illegal request parts":
  - do:
      catch: /Query in rated requests should not contain aggregations./
      rank_eval:
        body: {
          "templates": [ { "id": "match", "template": {"source": "{ \"aggs\" : { \"avg_grade\" : { \"avg\" : { \"field\" : \"grade\" }}}}" }} ],
          "requests" : [
            {
              "id": "amsterdam_query",
              "params": { "query_string": "amsterdam" },
              "template_id": "match",
              "ratings": []
            }
          ],
          "metric" : { "precision": { }}
        }

  - do:
      catch: /Query in rated requests should not contain a suggest section./
      rank_eval:
        body: {
          "templates": [ { "id": "match", "template": {"source": "{\"suggest\" : {\"my-suggestion\" : {\"text\" : \"Elastic\",\"term\" : {\"field\" : \"message\"}}}}" }} ],
          "requests" : [
            {
              "id": "amsterdam_query",
              "params": { "query_string": "amsterdam" },
              "template_id": "match",
              "ratings": []
            }
          ],
          "metric" : { "precision": { }}
        }

  - do:
      catch: /Query in rated requests should not contain a highlighter section./
      rank_eval:
        body: {
          "templates": [ { "id": "match", "template": {"source": "{\"highlight\" : { \"fields\" : {\"content\" : {}}}}" }} ],
          "requests" : [
            {
              "id": "amsterdam_query",
              "params": { "query_string": "amsterdam" },
              "template_id": "match",
              "ratings": []
            }
          ],
          "metric" : { "precision": { }}
        }

  - do:
      catch: /Query in rated requests should not use profile./
      rank_eval:
        body: {
          "templates": [ { "id": "match", "template": {"source": "{\"profile\" : \"true\" }" }} ],
          "requests" : [
            {
              "id": "amsterdam_query",
              "params": { "query_string": "amsterdam" },
              "template_id": "match",
              "ratings": []
            }
          ],
          "metric" : { "precision": { }}
        }

  - do:
      catch: /Query in rated requests should not use explain./
      rank_eval:
        body: {
          "templates": [ { "id": "match", "template": {"source": "{\"explain\" : \"true\" }" }} ],
          "requests" : [
            {
              "id": "amsterdam_query",
              "params": { "query_string": "amsterdam" },
              "template_id": "match",
              "ratings": []
            }
          ],
          "metric" : { "precision": { }}
        }


@@ -1,72 +0,0 @@
---
"Template request":
  - skip:
      version: " - 6.1.99"
      reason: the ranking evaluation feature is available since 6.2

  - do:
      indices.create:
        index: foo
        body:
          settings:
            index:
              number_of_shards: 1
  - do:
      index:
        index: foo
        type: bar
        id: doc1
        body: { "text": "berlin" }
  - do:
      index:
        index: foo
        type: bar
        id: doc2
        body: { "text": "amsterdam" }
  - do:
      index:
        index: foo
        type: bar
        id: doc3
        body: { "text": "amsterdam" }
  - do:
      index:
        index: foo
        type: bar
        id: doc4
        body: { "text": "something about amsterdam and berlin" }
  - do:
      indices.refresh: {}

  - do:
      rank_eval:
        body: {
          "templates": [ { "id": "match", "template": {"source": "{\"query\": { \"match\" : {\"text\" : \"{{query_string}}\" }}}" }} ],
          "requests" : [
            {
              "id": "amsterdam_query",
              "params": { "query_string": "amsterdam" },
              "template_id": "match",
              "ratings": [
                {"_index": "foo", "_id": "doc1", "rating": 0},
                {"_index": "foo", "_id": "doc2", "rating": 1},
                {"_index": "foo", "_id": "doc3", "rating": 1}]
            },
            {
              "id" : "berlin_query",
              "params": { "query_string": "berlin" },
              "template_id": "match",
              "ratings": [{"_index": "foo", "_id": "doc1", "rating": 1}]
            }
          ],
          "metric" : { "precision": { }}
        }

  - match: {quality_level: 0.5833333333333333}
  - match: {details.berlin_query.unknown_docs.0._id: "doc4"}
  - match: {details.amsterdam_query.unknown_docs.0._id: "doc4"}

qa/vagrant/README.md Normal file
View File

@ -0,0 +1,119 @@
# packaging tests
This project contains tests that verify the distributions we build work
correctly on the operating systems we support. They're intended to cover the
steps a user would take when installing and configuring an Elasticsearch
distribution. They're not intended to have significant coverage of the behavior
of Elasticsearch's features.
There are two types of tests in this project. The old tests live in
`src/test/` and are written in [Bats](https://github.com/sstephenson/bats),
which is a flavor of bash scripts that run as unit tests. These tests are
deprecated because Bats is unmaintained and cannot run on Windows.
The new tests live in `src/main/` and are written in Java. Like the old tests,
this project's tests are run inside the VM, not on your host. All new packaging
tests should be added to this set of tests if possible.
## Running these tests
See the section in [TESTING.asciidoc](../../TESTING.asciidoc#testing-packaging)
## Adding a new test class
When Gradle runs the packaging tests on a VM, it runs the full suite by
default. To add a test class to the suite, add its `class` to the
`@SuiteClasses` annotation in [PackagingTests.java](src/main/java/org/elasticsearch/packaging/PackagingTests.java).
If a test class is added to the project but not to this annotation, it will not
run in CI jobs. The test classes are run in the order they are listed in the
annotation.
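As a rough sketch (the test class names here are hypothetical; only the
`@SuiteClasses` mechanism is from this project), the suite might look like:
```java
import org.junit.runner.RunWith;
import org.junit.runners.Suite;
import org.junit.runners.Suite.SuiteClasses;

// Hypothetical suite: replace the class names with the project's real test classes.
@RunWith(Suite.class)
@SuiteClasses({
    DefaultTarTests.class, // classes run in the order they are listed
    MyNewTests.class       // add your new class here so it runs in CI
})
public class PackagingTests {}
```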
## Choosing which distributions to test
Distributions are represented by [enum values](src/main/java/org/elasticsearch/packaging/util/Distribution.java)
which know if they are compatible with the platform the tests are currently
running on. To skip a test if the distribution it's using isn't compatible with
the current platform, put this [assumption](https://github.com/junit-team/junit4/wiki/assumptions-with-assume)
in your test method or in a `@Before` method:
```java
assumeTrue(distribution.packaging.compatible);
```
Similarly if you write a test that is intended only for particular platforms,
you can make an assumption using the constants and methods in [Platforms.java](src/main/java/org/elasticsearch/packaging/util/Platforms.java):
```java
assumeTrue("only run on windows", Platforms.WINDOWS);
assumeTrue("only run if using systemd", Platforms.isSystemd());
```
## Writing a test that covers multiple distributions
The most straightforward way to run and reproduce specific test cases is to
create a test case class with an abstract method that provides the
distribution:
```java
public abstract class MyTestCase {
@Test
public void myTest() { /* do something with the value of #distribution() */ }
abstract Distribution distribution();
}
```
and then, for each distribution you want to test, create a subclass:
```java
public class MyTestDefaultTar extends MyTestCase {
@Override
Distribution distribution() { return Distribution.DEFAULT_TAR; }
}
```
That way, when a test fails, the user is told explicitly that `MyTestDefaultTar`
failed and can reproduce it by running that class. See [ArchiveTestCase](src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java)
and its children for an example of this.
## Running external commands
In general it's probably best to avoid running external commands when a good
Java alternative exists. For example, most filesystem operations can be done with
the java.nio.file APIs. For those that can't, use an instance of [Shell](src/main/java/org/elasticsearch/packaging/util/Shell.java).
Despite the name, commands run with this class are not run in a shell, and any
familiar features of shells like variables or expansion won't work.
If you do need the shell, you must explicitly invoke the shell's command. For
example, to run a command with Bash, use the `bash -c command` syntax. Note that
the entire script must be in a single string argument:
```java
Shell sh = new Shell();
sh.run("bash", "-c", "echo $foo; echo $bar");
```
Similarly for PowerShell: again, the entire PowerShell script must go in a
single string argument:
```java
sh.run("powershell.exe", "-Command", "Write-Host $foo; Write-Host $bar");
```
On Linux, most commands you'll want to use will be executable files and will
work fine without a shell:
```java
sh.run("tar", "-xzpf", "elasticsearch-6.1.0.tar.gz");
```
On Windows you'll mostly want to use PowerShell, as it can do a lot more and
gives much better feedback than Windows' legacy command line. Unfortunately that
means you'll need to use the `powershell.exe -Command` syntax, as
PowerShell's [Cmdlets](https://msdn.microsoft.com/en-us/library/ms714395.aspx)
don't correspond to executable files and are not runnable by `Runtime` directly.
When writing PowerShell commands this way, make sure to test them, as some kinds
of formatting can cause PowerShell to return a successful exit code without
actually running anything.

View File

@ -31,8 +31,7 @@ import org.elasticsearch.packaging.util.Installation;
import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; import static org.elasticsearch.packaging.util.Cleanup.cleanEverything;
import static org.elasticsearch.packaging.util.Archives.installArchive; import static org.elasticsearch.packaging.util.Archives.installArchive;
import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation; import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation;
import static org.hamcrest.CoreMatchers.is; import static org.junit.Assume.assumeTrue;
import static org.junit.Assume.assumeThat;
/** /**
* Tests that apply to the archive distributions (tar, zip). To add a case for a distribution, subclass and * Tests that apply to the archive distributions (tar, zip). To add a case for a distribution, subclass and
@ -54,7 +53,7 @@ public abstract class ArchiveTestCase {
@Before @Before
public void onlyCompatibleDistributions() { public void onlyCompatibleDistributions() {
assumeThat(distribution().packaging.compatible, is(true)); assumeTrue("only compatible distributions", distribution().packaging.compatible);
} }
@Test @Test

View File

@ -144,25 +144,18 @@ setup:
- length: { aggregations.ip_range.buckets: 3 } - length: { aggregations.ip_range.buckets: 3 }
# ip_range does not automatically add keys to buckets, see #21045
# - match: { aggregations.ip_range.buckets.0.key: "*-192.168.0.0" }
- is_false: aggregations.ip_range.buckets.0.from - is_false: aggregations.ip_range.buckets.0.from
- match: { aggregations.ip_range.buckets.0.to: "192.168.0.0" } - match: { aggregations.ip_range.buckets.0.to: "192.168.0.0" }
- match: { aggregations.ip_range.buckets.0.doc_count: 1 } - match: { aggregations.ip_range.buckets.0.doc_count: 1 }
# - match: { aggregations.ip_range.buckets.1.key: "192.168.0.0-192.169.0.0" }
- match: { aggregations.ip_range.buckets.1.from: "192.168.0.0" } - match: { aggregations.ip_range.buckets.1.from: "192.168.0.0" }
- match: { aggregations.ip_range.buckets.1.to: "192.169.0.0" } - match: { aggregations.ip_range.buckets.1.to: "192.169.0.0" }
- match: { aggregations.ip_range.buckets.1.doc_count: 2 } - match: { aggregations.ip_range.buckets.1.doc_count: 2 }
# - match: { aggregations.ip_range.buckets.2.key: "192.169.0.0-*" }
- match: { aggregations.ip_range.buckets.2.from: "192.169.0.0" } - match: { aggregations.ip_range.buckets.2.from: "192.169.0.0" }
- is_false: aggregations.ip_range.buckets.2.to - is_false: aggregations.ip_range.buckets.2.to
@ -177,24 +170,18 @@ setup:
- length: { aggregations.ip_range.buckets: 3 } - length: { aggregations.ip_range.buckets: 3 }
# - match: { aggregations.ip_range.buckets.0.key: "*-192.168.0.0" }
- is_false: aggregations.ip_range.buckets.0.from - is_false: aggregations.ip_range.buckets.0.from
- match: { aggregations.ip_range.buckets.0.to: "192.168.0.0" } - match: { aggregations.ip_range.buckets.0.to: "192.168.0.0" }
- match: { aggregations.ip_range.buckets.0.doc_count: 1 } - match: { aggregations.ip_range.buckets.0.doc_count: 1 }
# - match: { aggregations.ip_range.buckets.1.key: "192.168.0.0-192.169.0.0" }
- match: { aggregations.ip_range.buckets.1.from: "192.168.0.0" } - match: { aggregations.ip_range.buckets.1.from: "192.168.0.0" }
- match: { aggregations.ip_range.buckets.1.to: "192.169.0.0" } - match: { aggregations.ip_range.buckets.1.to: "192.169.0.0" }
- match: { aggregations.ip_range.buckets.1.doc_count: 2 } - match: { aggregations.ip_range.buckets.1.doc_count: 2 }
# - match: { aggregations.ip_range.buckets.2.key: "192.169.0.0-*" }
- match: { aggregations.ip_range.buckets.2.from: "192.169.0.0" } - match: { aggregations.ip_range.buckets.2.from: "192.169.0.0" }
- is_false: aggregations.ip_range.buckets.2.to - is_false: aggregations.ip_range.buckets.2.to
@ -223,6 +210,21 @@ setup:
- match: { aggregations.ip_range.buckets.1.doc_count: 2 } - match: { aggregations.ip_range.buckets.1.doc_count: 2 }
---
"IP Range Key Generation":
- skip:
version: " - 6.3.99"
reason: "Before 6.4.0, ip_range did not always generate bucket keys (see #21045)."
- do:
search:
body: { "size" : 0, "aggs" : { "ip_range" : { "ip_range" : { "field" : "ip", "ranges": [ { "to": "192.168.0.0" }, { "from": "192.168.0.0", "to": "192.169.0.0" }, { "from": "192.169.0.0" } ] } } } }
- length: { aggregations.ip_range.buckets: 3 }
- match: { aggregations.ip_range.buckets.0.key: "*-192.168.0.0" }
- match: { aggregations.ip_range.buckets.1.key: "192.168.0.0-192.169.0.0" }
- match: { aggregations.ip_range.buckets.2.key: "192.169.0.0-*" }
--- ---
"Date range": "Date range":
- do: - do:

View File

@ -90,18 +90,12 @@ public interface ActionListener<Response> {
* @param <Response> the type of the response * @param <Response> the type of the response
* @return a bi consumer that will complete the wrapped listener * @return a bi consumer that will complete the wrapped listener
*/ */
static <Response> BiConsumer<Response, Throwable> toBiConsumer(ActionListener<Response> listener) { static <Response> BiConsumer<Response, Exception> toBiConsumer(ActionListener<Response> listener) {
return (response, throwable) -> { return (response, throwable) -> {
if (throwable == null) { if (throwable == null) {
listener.onResponse(response); listener.onResponse(response);
} else { } else {
if (throwable instanceof Exception) { listener.onFailure(throwable);
listener.onFailure((Exception) throwable);
} else if (throwable instanceof Error) {
throw (Error) throwable;
} else {
throw new AssertionError("Should have been either Error or Exception", throwable);
}
} }
}; };
} }
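As a hedged aside (not part of this commit), the simplified `BiConsumer` shape can be exercised directly; a minimal sketch, assuming the usual `ActionListener.wrap` helper:
```java
import java.util.function.BiConsumer;
import org.elasticsearch.action.ActionListener;

// Sketch: the BiConsumer returned by toBiConsumer completes the wrapped listener.
public class ToBiConsumerExample {
    public static void main(String[] args) {
        ActionListener<String> listener = ActionListener.wrap(
            response -> System.out.println("response: " + response),
            e -> System.err.println("failure: " + e));
        BiConsumer<String, Exception> consumer = ActionListener.toBiConsumer(listener);
        consumer.accept("ok", null);                  // null throwable -> onResponse
        consumer.accept(null, new Exception("boom")); // non-null -> onFailure
    }
}
```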

View File

@ -23,6 +23,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
@ -33,10 +34,6 @@ import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterAware;
import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.RemoteClusterService;
import org.elasticsearch.transport.Transport;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportService;
import java.util.ArrayList; import java.util.ArrayList;
@ -49,7 +46,6 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction<Fie
private final ClusterService clusterService; private final ClusterService clusterService;
private final TransportFieldCapabilitiesIndexAction shardAction; private final TransportFieldCapabilitiesIndexAction shardAction;
private final RemoteClusterService remoteClusterService; private final RemoteClusterService remoteClusterService;
private final TransportService transportService;
@Inject @Inject
public TransportFieldCapabilitiesAction(Settings settings, TransportService transportService, public TransportFieldCapabilitiesAction(Settings settings, TransportService transportService,
@ -62,7 +58,6 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction<Fie
actionFilters, indexNameExpressionResolver, FieldCapabilitiesRequest::new); actionFilters, indexNameExpressionResolver, FieldCapabilitiesRequest::new);
this.clusterService = clusterService; this.clusterService = clusterService;
this.remoteClusterService = transportService.getRemoteClusterService(); this.remoteClusterService = transportService.getRemoteClusterService();
this.transportService = transportService;
this.shardAction = shardAction; this.shardAction = shardAction;
} }
@ -118,48 +113,21 @@ public class TransportFieldCapabilitiesAction extends HandledTransportAction<Fie
for (Map.Entry<String, OriginalIndices> remoteIndices : remoteClusterIndices.entrySet()) { for (Map.Entry<String, OriginalIndices> remoteIndices : remoteClusterIndices.entrySet()) {
String clusterAlias = remoteIndices.getKey(); String clusterAlias = remoteIndices.getKey();
OriginalIndices originalIndices = remoteIndices.getValue(); OriginalIndices originalIndices = remoteIndices.getValue();
// if we are connected this is basically a no-op, if we are not we try to connect in parallel in a non-blocking fashion Client remoteClusterClient = remoteClusterService.getRemoteClusterClient(threadPool, clusterAlias);
remoteClusterService.ensureConnected(clusterAlias, ActionListener.wrap(v -> {
Transport.Connection connection = remoteClusterService.getConnection(clusterAlias);
FieldCapabilitiesRequest remoteRequest = new FieldCapabilitiesRequest(); FieldCapabilitiesRequest remoteRequest = new FieldCapabilitiesRequest();
remoteRequest.setMergeResults(false); // we need to merge on this node remoteRequest.setMergeResults(false); // we need to merge on this node
remoteRequest.indicesOptions(originalIndices.indicesOptions()); remoteRequest.indicesOptions(originalIndices.indicesOptions());
remoteRequest.indices(originalIndices.indices()); remoteRequest.indices(originalIndices.indices());
remoteRequest.fields(request.fields()); remoteRequest.fields(request.fields());
transportService.sendRequest(connection, FieldCapabilitiesAction.NAME, remoteRequest, TransportRequestOptions.EMPTY, remoteClusterClient.fieldCaps(remoteRequest, ActionListener.wrap(response -> {
new TransportResponseHandler<FieldCapabilitiesResponse>() {
@Override
public FieldCapabilitiesResponse newInstance() {
return new FieldCapabilitiesResponse();
}
@Override
public void handleResponse(FieldCapabilitiesResponse response) {
try {
for (FieldCapabilitiesIndexResponse res : response.getIndexResponses()) { for (FieldCapabilitiesIndexResponse res : response.getIndexResponses()) {
indexResponses.add(new FieldCapabilitiesIndexResponse(RemoteClusterAware. indexResponses.add(new FieldCapabilitiesIndexResponse(RemoteClusterAware.
buildRemoteIndexName(clusterAlias, res.getIndexName()), res.get())); buildRemoteIndexName(clusterAlias, res.getIndexName()), res.get()));
} }
} finally {
onResponse.run(); onResponse.run();
}, failure -> onResponse.run()));
} }
} }
@Override
public void handleException(TransportException exp) {
onResponse.run();
}
@Override
public String executor() {
return ThreadPool.Names.SAME;
}
});
}, e -> onResponse.run()));
}
}
} }
private FieldCapabilitiesResponse merge(List<FieldCapabilitiesIndexResponse> indexResponses) { private FieldCapabilitiesResponse merge(List<FieldCapabilitiesIndexResponse> indexResponses) {

View File

@ -25,13 +25,15 @@ import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException; import java.io.IOException;
import java.util.Objects; import java.util.Objects;
public class PutPipelineRequest extends AcknowledgedRequest<PutPipelineRequest> { public class PutPipelineRequest extends AcknowledgedRequest<PutPipelineRequest> implements ToXContentObject {
private String id; private String id;
private BytesReference source; private BytesReference source;
@ -96,4 +98,14 @@ public class PutPipelineRequest extends AcknowledgedRequest<PutPipelineRequest>
out.writeEnum(xContentType); out.writeEnum(xContentType);
} }
} }
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (source != null) {
builder.rawValue(source.streamInput(), xContentType);
} else {
builder.startObject().endObject();
}
return builder;
}
} }
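For illustration (the pipeline id and body below are made up; the constructor itself appears in the tests later in this commit), a request whose `toXContent` echoes its raw source might be built like this:
```java
// Fragment sketch: the request carries its source verbatim, so toXContent writes it back out.
BytesArray source = new BytesArray(
    "{\"description\":\"demo\",\"processors\":[{\"set\":{\"field\":\"foo\",\"value\":\"bar\"}}]}");
PutPipelineRequest request = new PutPipelineRequest("demo-pipeline", source, XContentType.JSON);
```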

View File

@ -0,0 +1,62 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.ingest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
public class PutPipelineResponse extends AcknowledgedResponse implements ToXContentObject {
private static final ConstructingObjectParser<PutPipelineResponse, Void> PARSER = new ConstructingObjectParser<>(
"cluster_put_pipeline", true, args -> new PutPipelineResponse((boolean) args[0]));
static {
declareAcknowledgedField(PARSER);
}
public PutPipelineResponse() {
}
public PutPipelineResponse(boolean acknowledged) {
super(acknowledged);
}
@Override
public void readFrom(StreamInput in) throws IOException {
super.readFrom(in);
readAcknowledged(in);
}
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
writeAcknowledged(out);
}
public static PutPipelineResponse fromXContent(XContentParser parser) {
return PARSER.apply(parser, null);
}
}

View File

@ -68,6 +68,7 @@ import java.util.function.ToLongBiFunction;
* @param <V> The type of the values * @param <V> The type of the values
*/ */
public class Cache<K, V> { public class Cache<K, V> {
// positive if entries have an expiration // positive if entries have an expiration
private long expireAfterAccessNanos = -1; private long expireAfterAccessNanos = -1;
@ -282,6 +283,39 @@ public class Cache<K, V> {
} }
} }
/**
* remove an entry from the segment iff the future is done and the value is equal to the
* expected value
*
* @param key the key of the entry to remove from the cache
* @param value the value expected to be associated with the key
* @param onRemoval a callback for the removed entry
*/
void remove(K key, V value, Consumer<CompletableFuture<Entry<K, V>>> onRemoval) {
CompletableFuture<Entry<K, V>> future;
boolean removed = false;
try (ReleasableLock ignored = writeLock.acquire()) {
future = map.get(key);
try {
if (future != null) {
if (future.isDone()) {
Entry<K, V> entry = future.get();
if (Objects.equals(value, entry.value)) {
removed = map.remove(key, future);
}
}
}
} catch (ExecutionException | InterruptedException e) {
throw new IllegalStateException(e);
}
}
if (future != null && removed) {
segmentStats.eviction();
onRemoval.accept(future);
}
}
private static class SegmentStats { private static class SegmentStats {
private final LongAdder hits = new LongAdder(); private final LongAdder hits = new LongAdder();
private final LongAdder misses = new LongAdder(); private final LongAdder misses = new LongAdder();
@ -314,7 +348,7 @@ public class Cache<K, V> {
Entry<K, V> tail; Entry<K, V> tail;
// lock protecting mutations to the LRU list // lock protecting mutations to the LRU list
private ReleasableLock lruLock = new ReleasableLock(new ReentrantLock()); private final ReleasableLock lruLock = new ReleasableLock(new ReentrantLock());
/** /**
* Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key. * Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
@ -455,15 +489,7 @@ public class Cache<K, V> {
} }
} }
/** private final Consumer<CompletableFuture<Entry<K, V>>> invalidationConsumer = f -> {
* Invalidate the association for the specified key. A removal notification will be issued for invalidated
* entries with {@link org.elasticsearch.common.cache.RemovalNotification.RemovalReason} INVALIDATED.
*
* @param key the key whose mapping is to be invalidated from the cache
*/
public void invalidate(K key) {
CacheSegment<K, V> segment = getCacheSegment(key);
segment.remove(key, f -> {
try { try {
Entry<K, V> entry = f.get(); Entry<K, V> entry = f.get();
try (ReleasableLock ignored = lruLock.acquire()) { try (ReleasableLock ignored = lruLock.acquire()) {
@ -474,7 +500,30 @@ public class Cache<K, V> {
} catch (InterruptedException e) { } catch (InterruptedException e) {
throw new IllegalStateException(e); throw new IllegalStateException(e);
} }
}); };
/**
* Invalidate the association for the specified key. A removal notification will be issued for invalidated
* entries with {@link org.elasticsearch.common.cache.RemovalNotification.RemovalReason} INVALIDATED.
*
* @param key the key whose mapping is to be invalidated from the cache
*/
public void invalidate(K key) {
CacheSegment<K, V> segment = getCacheSegment(key);
segment.remove(key, invalidationConsumer);
}
/**
* Invalidate the entry for the specified key and value. If the value provided is not equal to the value in
* the cache, no removal will occur. A removal notification will be issued for invalidated
* entries with {@link org.elasticsearch.common.cache.RemovalNotification.RemovalReason} INVALIDATED.
*
* @param key the key whose mapping is to be invalidated from the cache
* @param value the expected value that should be associated with the key
*/
public void invalidate(K key, V value) {
CacheSegment<K, V> segment = getCacheSegment(key);
segment.remove(key, value, invalidationConsumer);
} }
/** /**
@ -625,7 +674,7 @@ public class Cache<K, V> {
Entry<K, V> entry = current; Entry<K, V> entry = current;
if (entry != null) { if (entry != null) {
CacheSegment<K, V> segment = getCacheSegment(entry.key); CacheSegment<K, V> segment = getCacheSegment(entry.key);
segment.remove(entry.key, f -> {}); segment.remove(entry.key, entry.value, f -> {});
try (ReleasableLock ignored = lruLock.acquire()) { try (ReleasableLock ignored = lruLock.acquire()) {
current = null; current = null;
delete(entry, RemovalNotification.RemovalReason.INVALIDATED); delete(entry, RemovalNotification.RemovalReason.INVALIDATED);
@ -710,7 +759,7 @@ public class Cache<K, V> {
CacheSegment<K, V> segment = getCacheSegment(entry.key); CacheSegment<K, V> segment = getCacheSegment(entry.key);
if (segment != null) { if (segment != null) {
segment.remove(entry.key, f -> {}); segment.remove(entry.key, entry.value, f -> {});
} }
delete(entry, RemovalNotification.RemovalReason.EVICTED); delete(entry, RemovalNotification.RemovalReason.EVICTED);
} }
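A hedged usage sketch of the conditional invalidation added above (keys and values invented for illustration):
```java
// Sketch: invalidate(key, value) removes the entry only if the cached value matches.
Cache<String, String> cache = CacheBuilder.<String, String>builder()
    .removalListener(n -> System.out.println("removed: " + n.getKey()))
    .build();
cache.put("k", "v1");
cache.invalidate("k", "stale"); // value mismatch: entry stays, no notification
cache.invalidate("k", "v1");    // match: entry removed, listener notified (INVALIDATED)
```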

View File

@ -22,6 +22,7 @@ package org.elasticsearch.common.geo;
import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.LatLonDocValuesField;
import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.geo.GeoEncodingUtils;
import org.apache.lucene.geo.Rectangle;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BitUtil;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
@ -85,6 +86,13 @@ public final class GeoPoint implements ToXContentFragment {
public GeoPoint resetFromString(String value, final boolean ignoreZValue) { public GeoPoint resetFromString(String value, final boolean ignoreZValue) {
if (value.contains(",")) { if (value.contains(",")) {
return resetFromCoordinates(value, ignoreZValue);
}
return resetFromGeoHash(value);
}
public GeoPoint resetFromCoordinates(String value, final boolean ignoreZValue) {
String[] vals = value.split(","); String[] vals = value.split(",");
if (vals.length > 3) { if (vals.length > 3) {
throw new ElasticsearchParseException("failed to parse [{}], expected 2 or 3 coordinates " throw new ElasticsearchParseException("failed to parse [{}], expected 2 or 3 coordinates "
@ -97,8 +105,7 @@ public final class GeoPoint implements ToXContentFragment {
} }
return reset(lat, lon); return reset(lat, lon);
} }
return resetFromGeoHash(value);
}
public GeoPoint resetFromIndexHash(long hash) { public GeoPoint resetFromIndexHash(long hash) {
lon = GeoHashUtils.decodeLongitude(hash); lon = GeoHashUtils.decodeLongitude(hash);

View File

@ -387,6 +387,25 @@ public class GeoUtils {
} }
} }
/**
* Represents the point of the geohash cell that should be used as the value of geohash
*/
public enum EffectivePoint {
TOP_LEFT,
TOP_RIGHT,
BOTTOM_LEFT,
BOTTOM_RIGHT
}
/**
* Parse a geopoint represented as an object, string or an array. If the geopoint is represented as a geohash,
* the bottom left corner of the geohash cell is used as the geopoint coordinates.
*/
public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point, final boolean ignoreZValue)
throws IOException, ElasticsearchParseException {
return parseGeoPoint(parser, point, ignoreZValue, EffectivePoint.BOTTOM_LEFT);
}
/** /**
* Parse a {@link GeoPoint} with a {@link XContentParser}. A geopoint has one of the following forms: * Parse a {@link GeoPoint} with a {@link XContentParser}. A geopoint has one of the following forms:
* *
@ -401,7 +420,7 @@ public class GeoUtils {
* @param point A {@link GeoPoint} that will be reset by the values parsed * @param point A {@link GeoPoint} that will be reset by the values parsed
* @return new {@link GeoPoint} parsed from the parse * @return new {@link GeoPoint} parsed from the parse
*/ */
public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point, final boolean ignoreZValue) public static GeoPoint parseGeoPoint(XContentParser parser, GeoPoint point, final boolean ignoreZValue, EffectivePoint effectivePoint)
throws IOException, ElasticsearchParseException { throws IOException, ElasticsearchParseException {
double lat = Double.NaN; double lat = Double.NaN;
double lon = Double.NaN; double lon = Double.NaN;
@ -458,7 +477,7 @@ public class GeoUtils {
if(!Double.isNaN(lat) || !Double.isNaN(lon)) { if(!Double.isNaN(lat) || !Double.isNaN(lon)) {
throw new ElasticsearchParseException("field must be either lat/lon or geohash"); throw new ElasticsearchParseException("field must be either lat/lon or geohash");
} else { } else {
return point.resetFromGeoHash(geohash); return parseGeoHash(point, geohash, effectivePoint);
} }
} else if (numberFormatException != null) { } else if (numberFormatException != null) {
throw new ElasticsearchParseException("[{}] and [{}] must be valid double values", numberFormatException, LATITUDE, throw new ElasticsearchParseException("[{}] and [{}] must be valid double values", numberFormatException, LATITUDE,
@ -489,12 +508,36 @@ public class GeoUtils {
} }
return point.reset(lat, lon); return point.reset(lat, lon);
} else if(parser.currentToken() == Token.VALUE_STRING) { } else if(parser.currentToken() == Token.VALUE_STRING) {
return point.resetFromString(parser.text(), ignoreZValue); String val = parser.text();
if (val.contains(",")) {
return point.resetFromString(val, ignoreZValue);
} else {
return parseGeoHash(point, val, effectivePoint);
}
} else { } else {
throw new ElasticsearchParseException("geo_point expected"); throw new ElasticsearchParseException("geo_point expected");
} }
} }
private static GeoPoint parseGeoHash(GeoPoint point, String geohash, EffectivePoint effectivePoint) {
if (effectivePoint == EffectivePoint.BOTTOM_LEFT) {
return point.resetFromGeoHash(geohash);
} else {
Rectangle rectangle = GeoHashUtils.bbox(geohash);
switch (effectivePoint) {
case TOP_LEFT:
return point.reset(rectangle.maxLat, rectangle.minLon);
case TOP_RIGHT:
return point.reset(rectangle.maxLat, rectangle.maxLon);
case BOTTOM_RIGHT:
return point.reset(rectangle.minLat, rectangle.maxLon);
default:
throw new IllegalArgumentException("Unsupported effective point " + effectivePoint);
}
}
}
/** /**
* Parse a precision that can be expressed as an integer or a distance measure like "1km", "10m". * Parse a precision that can be expressed as an integer or a distance measure like "1km", "10m".
* *

View File

@ -0,0 +1,115 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util.concurrent;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.collect.Tuple;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
/**
* A future implementation that allows for the result to be passed to listeners waiting for
* notification. This is useful for cases where a computation is requested many times
* concurrently, but really only needs to be performed a single time. Once the computation
* has been performed the registered listeners will be notified by submitting a runnable
* for execution in the provided {@link ExecutorService}. If the computation has already
* been performed, a request to add a listener will simply result in execution of the listener
* on the calling thread.
*/
public final class ListenableFuture<V> extends BaseFuture<V> implements ActionListener<V> {
private volatile boolean done = false;
private final List<Tuple<ActionListener<V>, ExecutorService>> listeners = new ArrayList<>();
/**
* Adds a listener to this future. If the future has not yet completed, the listener will be
* notified of a response or exception in a runnable submitted to the ExecutorService provided.
* If the future has completed, the listener will be notified immediately without forking to
* a different thread.
*/
public void addListener(ActionListener<V> listener, ExecutorService executor) {
if (done) {
// run the callback directly, we don't hold the lock and don't need to fork!
notifyListener(listener, EsExecutors.newDirectExecutorService());
} else {
final boolean run;
// check done under lock since it could have been modified and protect modifications
// to the list under lock
synchronized (this) {
if (done) {
run = true;
} else {
listeners.add(new Tuple<>(listener, executor));
run = false;
}
}
if (run) {
// run the callback directly, we don't hold the lock and don't need to fork!
notifyListener(listener, EsExecutors.newDirectExecutorService());
}
}
}
@Override
protected synchronized void done() {
done = true;
listeners.forEach(t -> notifyListener(t.v1(), t.v2()));
// release references to any listeners as we no longer need them and will live
// much longer than the listeners in most cases
listeners.clear();
}
private void notifyListener(ActionListener<V> listener, ExecutorService executorService) {
try {
executorService.submit(() -> {
try {
// call get in a non-blocking fashion as we could be on a network thread
// or another thread like the scheduler, which we should never block!
V value = FutureUtils.get(this, 0L, TimeUnit.NANOSECONDS);
listener.onResponse(value);
} catch (Exception e) {
listener.onFailure(e);
}
});
} catch (Exception e) {
listener.onFailure(e);
}
}
@Override
public void onResponse(V v) {
final boolean set = set(v);
if (set == false) {
throw new IllegalStateException("did not set value, value or exception already set?");
}
}
@Override
public void onFailure(Exception e) {
final boolean set = setException(e);
if (set == false) {
throw new IllegalStateException("did not set exception, value already set or exception already set?");
}
}
}
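A minimal usage sketch (hypothetical; it assumes only the API shown above): the future doubles as the `ActionListener` that receives the result, and listeners registered beforehand are notified on the executor they supplied.
```java
// Sketch: one computation, many waiters.
ListenableFuture<String> future = new ListenableFuture<>();
future.addListener(ActionListener.wrap(
        value -> System.out.println("listener saw: " + value),
        e -> System.err.println("listener failed: " + e)),
    EsExecutors.newDirectExecutorService());
future.onResponse("computed once"); // completes the future and notifies listeners
```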

View File

@ -491,19 +491,19 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
right = parser.doubleValue(); right = parser.doubleValue();
} else { } else {
if (TOP_LEFT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { if (TOP_LEFT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
GeoUtils.parseGeoPoint(parser, sparse); GeoUtils.parseGeoPoint(parser, sparse, false, GeoUtils.EffectivePoint.TOP_LEFT);
top = sparse.getLat(); top = sparse.getLat();
left = sparse.getLon(); left = sparse.getLon();
} else if (BOTTOM_RIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { } else if (BOTTOM_RIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
GeoUtils.parseGeoPoint(parser, sparse); GeoUtils.parseGeoPoint(parser, sparse, false, GeoUtils.EffectivePoint.BOTTOM_RIGHT);
bottom = sparse.getLat(); bottom = sparse.getLat();
right = sparse.getLon(); right = sparse.getLon();
} else if (TOP_RIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { } else if (TOP_RIGHT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
GeoUtils.parseGeoPoint(parser, sparse); GeoUtils.parseGeoPoint(parser, sparse, false, GeoUtils.EffectivePoint.TOP_RIGHT);
top = sparse.getLat(); top = sparse.getLat();
right = sparse.getLon(); right = sparse.getLon();
} else if (BOTTOM_LEFT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { } else if (BOTTOM_LEFT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) {
GeoUtils.parseGeoPoint(parser, sparse); GeoUtils.parseGeoPoint(parser, sparse, false, GeoUtils.EffectivePoint.BOTTOM_LEFT);
bottom = sparse.getLat(); bottom = sparse.getLat();
left = sparse.getLon(); left = sparse.getLon();
} else { } else {
@ -515,7 +515,8 @@ public class GeoBoundingBoxQueryBuilder extends AbstractQueryBuilder<GeoBounding
} }
} }
if (envelope != null) { if (envelope != null) {
if ((Double.isNaN(top) || Double.isNaN(bottom) || Double.isNaN(left) || Double.isNaN(right)) == false) { if (Double.isNaN(top) == false || Double.isNaN(bottom) == false || Double.isNaN(left) == false ||
Double.isNaN(right) == false) {
throw new ElasticsearchParseException("failed to parse bounding box. Conflicting definition found " throw new ElasticsearchParseException("failed to parse bounding box. Conflicting definition found "
+ "using well-known text and explicit corners."); + "using well-known text and explicit corners.");
} }

View File

@ -32,10 +32,10 @@ import java.util.Map;
*/ */
public final class Pipeline { public final class Pipeline {
static final String DESCRIPTION_KEY = "description"; public static final String DESCRIPTION_KEY = "description";
static final String PROCESSORS_KEY = "processors"; public static final String PROCESSORS_KEY = "processors";
static final String VERSION_KEY = "version"; public static final String VERSION_KEY = "version";
static final String ON_FAILURE_KEY = "on_failure"; public static final String ON_FAILURE_KEY = "on_failure";
private final String id; private final String id;
@Nullable @Nullable

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search.aggregations.bucket.range; package org.elasticsearch.search.aggregations.bucket.range;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -57,35 +58,41 @@ public final class InternalBinaryRange
long docCount, InternalAggregations aggregations) { long docCount, InternalAggregations aggregations) {
this.format = format; this.format = format;
this.keyed = keyed; this.keyed = keyed;
this.key = key; this.key = key != null ? key : generateKey(from, to, format);
this.from = from; this.from = from;
this.to = to; this.to = to;
this.docCount = docCount; this.docCount = docCount;
this.aggregations = aggregations; this.aggregations = aggregations;
} }
// for serialization private static String generateKey(BytesRef from, BytesRef to, DocValueFormat format) {
private Bucket(StreamInput in, DocValueFormat format, boolean keyed) throws IOException { StringBuilder builder = new StringBuilder()
this.format = format; .append(from == null ? "*" : format.format(from))
this.keyed = keyed; .append("-")
key = in.readOptionalString(); .append(to == null ? "*" : format.format(to));
if (in.readBoolean()) { return builder.toString();
from = in.readBytesRef();
} else {
from = null;
} }
if (in.readBoolean()) {
to = in.readBytesRef(); private static Bucket createFromStream(StreamInput in, DocValueFormat format, boolean keyed) throws IOException {
} else { String key = in.getVersion().onOrAfter(Version.V_6_4_0)
to = null; ? in.readString()
} : in.readOptionalString();
docCount = in.readLong();
aggregations = InternalAggregations.readAggregations(in); BytesRef from = in.readBoolean() ? in.readBytesRef() : null;
BytesRef to = in.readBoolean() ? in.readBytesRef() : null;
long docCount = in.readLong();
InternalAggregations aggregations = InternalAggregations.readAggregations(in);
return new Bucket(format, keyed, key, from, to, docCount, aggregations);
} }
@Override @Override
public void writeTo(StreamOutput out) throws IOException { public void writeTo(StreamOutput out) throws IOException {
if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
out.writeString(key);
} else {
out.writeOptionalString(key); out.writeOptionalString(key);
}
out.writeBoolean(from != null); out.writeBoolean(from != null);
if (from != null) { if (from != null) {
out.writeBytesRef(from); out.writeBytesRef(from);
@ -122,20 +129,11 @@ public final class InternalBinaryRange
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
String key = this.key; String key = this.key;
if (keyed) { if (keyed) {
if (key == null) {
StringBuilder keyBuilder = new StringBuilder();
keyBuilder.append(from == null ? "*" : format.format(from));
keyBuilder.append("-");
keyBuilder.append(to == null ? "*" : format.format(to));
key = keyBuilder.toString();
}
builder.startObject(key); builder.startObject(key);
} else { } else {
builder.startObject(); builder.startObject();
if (key != null) {
builder.field(CommonFields.KEY.getPreferredName(), key); builder.field(CommonFields.KEY.getPreferredName(), key);
} }
}
if (from != null) { if (from != null) {
builder.field(CommonFields.FROM.getPreferredName(), getFrom()); builder.field(CommonFields.FROM.getPreferredName(), getFrom());
} }
@ -208,10 +206,9 @@ public final class InternalBinaryRange
super(in); super(in);
format = in.readNamedWriteable(DocValueFormat.class); format = in.readNamedWriteable(DocValueFormat.class);
keyed = in.readBoolean(); keyed = in.readBoolean();
buckets = in.readList(stream -> new Bucket(stream, format, keyed)); buckets = in.readList(stream -> Bucket.createFromStream(stream, format, keyed));
} }
@Override @Override
protected void doWriteTo(StreamOutput out) throws IOException { protected void doWriteTo(StreamOutput out) throws IOException {
out.writeNamedWriteable(format); out.writeNamedWriteable(format);

View File

@ -98,18 +98,16 @@ public class ParsedBinaryRange extends ParsedMultiBucketAggregation<ParsedBinary
@Override @Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (isKeyed()) { if (isKeyed()) {
builder.startObject(key != null ? key : rangeKey(from, to)); builder.startObject(key);
} else { } else {
builder.startObject(); builder.startObject();
if (key != null) {
builder.field(CommonFields.KEY.getPreferredName(), key); builder.field(CommonFields.KEY.getPreferredName(), key);
} }
}
if (from != null) { if (from != null) {
builder.field(CommonFields.FROM.getPreferredName(), getFrom()); builder.field(CommonFields.FROM.getPreferredName(), from);
} }
if (to != null) { if (to != null) {
builder.field(CommonFields.TO.getPreferredName(), getTo()); builder.field(CommonFields.TO.getPreferredName(), to);
} }
builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount()); builder.field(CommonFields.DOC_COUNT.getPreferredName(), getDocCount());
getAggregations().toXContentInternal(builder, params); getAggregations().toXContentInternal(builder, params);
@ -123,10 +121,9 @@ public class ParsedBinaryRange extends ParsedMultiBucketAggregation<ParsedBinary
XContentParser.Token token = parser.currentToken(); XContentParser.Token token = parser.currentToken();
String currentFieldName = parser.currentName(); String currentFieldName = parser.currentName();
String rangeKey = null;
if (keyed) { if (keyed) {
ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation);
rangeKey = currentFieldName; bucket.key = currentFieldName;
ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation);
} }
@ -150,19 +147,7 @@ public class ParsedBinaryRange extends ParsedMultiBucketAggregation<ParsedBinary
} }
} }
bucket.setAggregations(new Aggregations(aggregations)); bucket.setAggregations(new Aggregations(aggregations));
if (keyed) {
if (rangeKey(bucket.from, bucket.to).equals(rangeKey)) {
bucket.key = null;
} else {
bucket.key = rangeKey;
}
}
return bucket; return bucket;
} }
private static String rangeKey(String from, String to) {
return (from == null ? "*" : from) + '-' + (to == null ? "*" : to);
}
} }
} }

View File

@ -20,9 +20,13 @@
package org.elasticsearch.action.ingest; package org.elasticsearch.action.ingest;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.ingest.Pipeline;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import java.io.IOException; import java.io.IOException;
@ -43,4 +47,25 @@ public class PutPipelineRequestTests extends ESTestCase {
assertEquals(XContentType.JSON, serialized.getXContentType()); assertEquals(XContentType.JSON, serialized.getXContentType());
assertEquals("{}", serialized.getSource().utf8ToString()); assertEquals("{}", serialized.getSource().utf8ToString());
} }
public void testToXContent() throws IOException {
XContentType xContentType = randomFrom(XContentType.values());
XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent());
pipelineBuilder.startObject().field(Pipeline.DESCRIPTION_KEY, "some random set of processors");
pipelineBuilder.startArray(Pipeline.PROCESSORS_KEY);
//Start first processor
pipelineBuilder.startObject();
pipelineBuilder.startObject("set");
pipelineBuilder.field("field", "foo");
pipelineBuilder.field("value", "bar");
pipelineBuilder.endObject();
pipelineBuilder.endObject();
//End first processor
pipelineBuilder.endArray();
pipelineBuilder.endObject();
PutPipelineRequest request = new PutPipelineRequest("1", BytesReference.bytes(pipelineBuilder), xContentType);
XContentBuilder requestBuilder = XContentBuilder.builder(xContentType.xContent());
BytesReference actualRequestBody = BytesReference.bytes(request.toXContent(requestBuilder, ToXContent.EMPTY_PARAMS));
assertEquals(BytesReference.bytes(pipelineBuilder), actualRequestBody);
}
} }

View File

@ -0,0 +1,53 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.ingest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.AbstractStreamableXContentTestCase;
public class PutPipelineResponseTests extends AbstractStreamableXContentTestCase<PutPipelineResponse> {
public void testToXContent() {
PutPipelineResponse response = new PutPipelineResponse(true);
String output = Strings.toString(response);
assertEquals("{\"acknowledged\":true}", output);
}
@Override
protected PutPipelineResponse doParseInstance(XContentParser parser) {
return PutPipelineResponse.fromXContent(parser);
}
@Override
protected PutPipelineResponse createTestInstance() {
return new PutPipelineResponse(randomBoolean());
}
@Override
protected PutPipelineResponse createBlankInstance() {
return new PutPipelineResponse();
}
@Override
protected PutPipelineResponse mutateInstance(PutPipelineResponse response) {
return new PutPipelineResponse(response.isAcknowledged() == false);
}
}

View File

@ -66,6 +66,7 @@ public class IndexingMasterFailoverIT extends ESIntegTestCase {
* This retry logic is implemented in TransportMasterNodeAction and tested by the following master failover scenario. * This retry logic is implemented in TransportMasterNodeAction and tested by the following master failover scenario.
*/ */
@TestLogging("_root:DEBUG") @TestLogging("_root:DEBUG")
@AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30844")
public void testMasterFailoverDuringIndexingWithMappingChanges() throws Throwable { public void testMasterFailoverDuringIndexingWithMappingChanges() throws Throwable {
logger.info("--> start 4 nodes, 3 master, 1 data"); logger.info("--> start 4 nodes, 3 master, 1 data");

View File

@ -457,6 +457,62 @@ public class CacheTests extends ESTestCase {
assertEquals(notifications, invalidated); assertEquals(notifications, invalidated);
} }
// randomly invalidate some cached entries, then check that a lookup for each of those and only those keys is null
public void testInvalidateWithValue() {
Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build();
for (int i = 0; i < numberOfEntries; i++) {
cache.put(i, Integer.toString(i));
}
Set<Integer> keys = new HashSet<>();
for (Integer key : cache.keys()) {
if (rarely()) {
if (randomBoolean()) {
cache.invalidate(key, key.toString());
keys.add(key);
} else {
// invalidate with incorrect value
cache.invalidate(key, Integer.toString(key * randomIntBetween(2, 10)));
}
}
}
for (int i = 0; i < numberOfEntries; i++) {
if (keys.contains(i)) {
assertNull(cache.get(i));
} else {
assertNotNull(cache.get(i));
}
}
}
// randomly invalidate some cached entries, then check that we receive invalidate notifications for those and only
// those entries
public void testNotificationOnInvalidateWithValue() {
Set<Integer> notifications = new HashSet<>();
Cache<Integer, String> cache =
CacheBuilder.<Integer, String>builder()
.removalListener(notification -> {
assertEquals(RemovalNotification.RemovalReason.INVALIDATED, notification.getRemovalReason());
notifications.add(notification.getKey());
})
.build();
for (int i = 0; i < numberOfEntries; i++) {
cache.put(i, Integer.toString(i));
}
Set<Integer> invalidated = new HashSet<>();
for (int i = 0; i < numberOfEntries; i++) {
if (rarely()) {
if (randomBoolean()) {
cache.invalidate(i, Integer.toString(i));
invalidated.add(i);
} else {
// invalidate with incorrect value
cache.invalidate(i, Integer.toString(i * randomIntBetween(2, 10)));
}
}
}
assertEquals(notifications, invalidated);
}
// invalidate all cached entries, then check that the cache is empty // invalidate all cached entries, then check that the cache is empty
public void testInvalidateAll() { public void testInvalidateAll() {
Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build(); Cache<Integer, String> cache = CacheBuilder.<Integer, String>builder().build();

View File

@ -0,0 +1,118 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.common.util.concurrent;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicInteger;
public class ListenableFutureTests extends ESTestCase {
private ExecutorService executorService;
@After
public void stopExecutorService() throws InterruptedException {
if (executorService != null) {
terminate(executorService);
}
}
public void testListenableFutureNotifiesListeners() {
ListenableFuture<String> future = new ListenableFuture<>();
AtomicInteger notifications = new AtomicInteger(0);
final int numberOfListeners = scaledRandomIntBetween(1, 12);
for (int i = 0; i < numberOfListeners; i++) {
future.addListener(ActionListener.wrap(notifications::incrementAndGet), EsExecutors.newDirectExecutorService());
}
future.onResponse("");
assertEquals(numberOfListeners, notifications.get());
assertTrue(future.isDone());
}
public void testListenableFutureNotifiesListenersOnException() {
ListenableFuture<String> future = new ListenableFuture<>();
AtomicInteger notifications = new AtomicInteger(0);
final int numberOfListeners = scaledRandomIntBetween(1, 12);
final Exception exception = new RuntimeException();
for (int i = 0; i < numberOfListeners; i++) {
future.addListener(ActionListener.wrap(s -> fail("this should never be called"), e -> {
assertEquals(exception, e);
notifications.incrementAndGet();
}), EsExecutors.newDirectExecutorService());
}
future.onFailure(exception);
assertEquals(numberOfListeners, notifications.get());
assertTrue(future.isDone());
}
public void testConcurrentListenerRegistrationAndCompletion() throws BrokenBarrierException, InterruptedException {
final int numberOfThreads = scaledRandomIntBetween(2, 32);
final int completingThread = randomIntBetween(0, numberOfThreads - 1);
final ListenableFuture<String> future = new ListenableFuture<>();
executorService = EsExecutors.newFixed("testConcurrentListenerRegistrationAndCompletion", numberOfThreads, 1000,
EsExecutors.daemonThreadFactory("listener"), new ThreadContext(Settings.EMPTY));
final CyclicBarrier barrier = new CyclicBarrier(1 + numberOfThreads);
final CountDownLatch listenersLatch = new CountDownLatch(numberOfThreads - 1);
final AtomicInteger numResponses = new AtomicInteger(0);
final AtomicInteger numExceptions = new AtomicInteger(0);
for (int i = 0; i < numberOfThreads; i++) {
final int threadNum = i;
Thread thread = new Thread(() -> {
try {
barrier.await();
if (threadNum == completingThread) {
future.onResponse("");
} else {
future.addListener(ActionListener.wrap(s -> {
assertEquals("", s);
numResponses.incrementAndGet();
listenersLatch.countDown();
}, e -> {
logger.error("caught unexpected exception", e);
numExceptions.incrementAndGet();
listenersLatch.countDown();
}), executorService);
}
barrier.await();
} catch (InterruptedException | BrokenBarrierException e) {
throw new AssertionError(e);
}
});
thread.start();
}
barrier.await();
barrier.await();
listenersLatch.await();
assertEquals(numberOfThreads - 1, numResponses.get());
assertEquals(0, numExceptions.get());
}
}

View File

@@ -24,6 +24,7 @@ import org.apache.lucene.document.LatLonPoint;
import org.apache.lucene.search.IndexOrDocValuesQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
+import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;
import org.elasticsearch.index.mapper.MappedFieldType;
@@ -450,6 +451,64 @@ public class GeoBoundingBoxQueryBuilderTests extends AbstractQueryTestCase<GeoBo
        assertEquals(expectedJson, GeoExecType.MEMORY, parsed.type());
    }
public void testFromGeohash() throws IOException {
String json =
"{\n" +
" \"geo_bounding_box\" : {\n" +
" \"pin.location\" : {\n" +
" \"top_left\" : \"dr\",\n" +
" \"bottom_right\" : \"dq\"\n" +
" },\n" +
" \"validation_method\" : \"STRICT\",\n" +
" \"type\" : \"MEMORY\",\n" +
" \"ignore_unmapped\" : false,\n" +
" \"boost\" : 1.0\n" +
" }\n" +
"}";
String expectedJson =
"{\n" +
" \"geo_bounding_box\" : {\n" +
" \"pin.location\" : {\n" +
" \"top_left\" : [ -78.75, 45.0 ],\n" +
" \"bottom_right\" : [ -67.5, 33.75 ]\n" +
" },\n" +
" \"validation_method\" : \"STRICT\",\n" +
" \"type\" : \"MEMORY\",\n" +
" \"ignore_unmapped\" : false,\n" +
" \"boost\" : 1.0\n" +
" }\n" +
"}";
GeoBoundingBoxQueryBuilder parsed = (GeoBoundingBoxQueryBuilder) parseQuery(json);
checkGeneratedJson(expectedJson, parsed);
assertEquals(json, "pin.location", parsed.fieldName());
assertEquals(json, -78.75, parsed.topLeft().getLon(), 0.0001);
assertEquals(json, 45.0, parsed.topLeft().getLat(), 0.0001);
assertEquals(json, -67.5, parsed.bottomRight().getLon(), 0.0001);
assertEquals(json, 33.75, parsed.bottomRight().getLat(), 0.0001);
assertEquals(json, 1.0, parsed.boost(), 0.0001);
assertEquals(json, GeoExecType.MEMORY, parsed.type());
}
public void testMalformedGeohashes() {
String jsonGeohashAndWkt =
"{\n" +
" \"geo_bounding_box\" : {\n" +
" \"pin.location\" : {\n" +
" \"top_left\" : [ -78.75, 45.0 ],\n" +
" \"wkt\" : \"BBOX (-74.1, -71.12, 40.73, 40.01)\"\n" +
" },\n" +
" \"validation_method\" : \"STRICT\",\n" +
" \"type\" : \"MEMORY\",\n" +
" \"ignore_unmapped\" : false,\n" +
" \"boost\" : 1.0\n" +
" }\n" +
"}";
ElasticsearchParseException e1 = expectThrows(ElasticsearchParseException.class, () -> parseQuery(jsonGeohashAndWkt));
assertThat(e1.getMessage(), containsString("Conflicting definition found using well-known text and explicit corners."));
}
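
The expected corners above fall straight out of standard base-32 geohash decoding, where each character contributes five bits that alternately bisect the longitude and latitude intervals. A self-contained illustration written for this note (plain Java, independent of Elasticsearch's GeoUtils):

public class GeohashCell {
    private static final String BASE32 = "0123456789bcdefghjkmnpqrstuvwxyz";

    /** Returns {latMin, latMax, lonMin, lonMax} for the cell named by the geohash. */
    static double[] bounds(String geohash) {
        double latMin = -90, latMax = 90, lonMin = -180, lonMax = 180;
        boolean evenBit = true; // bits alternate, starting with longitude
        for (char c : geohash.toCharArray()) {
            int cd = BASE32.indexOf(c);
            for (int bit = 4; bit >= 0; bit--) {
                int b = (cd >> bit) & 1;
                if (evenBit) {
                    double mid = (lonMin + lonMax) / 2;
                    if (b == 1) lonMin = mid; else lonMax = mid;
                } else {
                    double mid = (latMin + latMax) / 2;
                    if (b == 1) latMin = mid; else latMax = mid;
                }
                evenBit = !evenBit;
            }
        }
        return new double[] { latMin, latMax, lonMin, lonMax };
    }

    public static void main(String[] args) {
        double[] dr = bounds("dr"); // top-left corner = (latMax, lonMin)
        double[] dq = bounds("dq"); // bottom-right corner = (latMin, lonMax)
        System.out.println("top_left = [" + dr[2] + ", " + dr[1] + "]");     // [-78.75, 45.0]
        System.out.println("bottom_right = [" + dq[3] + ", " + dq[0] + "]"); // [-67.5, 33.75]
    }
}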
    @Override
    public void testMustRewrite() throws IOException {
        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);

View File

@@ -610,6 +610,20 @@ public class GeoUtilsTests extends ESTestCase {
        }
    }
public void testParseGeoPointGeohashPositions() throws IOException {
assertNormalizedPoint(parseGeohash("drt5", GeoUtils.EffectivePoint.TOP_LEFT), new GeoPoint(42.890625, -71.71875));
assertNormalizedPoint(parseGeohash("drt5", GeoUtils.EffectivePoint.TOP_RIGHT), new GeoPoint(42.890625, -71.3671875));
assertNormalizedPoint(parseGeohash("drt5", GeoUtils.EffectivePoint.BOTTOM_LEFT), new GeoPoint(42.71484375, -71.71875));
assertNormalizedPoint(parseGeohash("drt5", GeoUtils.EffectivePoint.BOTTOM_RIGHT), new GeoPoint(42.71484375, -71.3671875));
assertNormalizedPoint(parseGeohash("drtk", GeoUtils.EffectivePoint.BOTTOM_LEFT), new GeoPoint(42.890625, -71.3671875));
}
private GeoPoint parseGeohash(String geohash, GeoUtils.EffectivePoint effectivePoint) throws IOException {
XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject());
parser.nextToken();
return GeoUtils.parseGeoPoint(parser, new GeoPoint(), randomBoolean(), effectivePoint);
}
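
A quick check of the cell arithmetic behind these corner values (worked numbers added here, not part of the test): a 4-character geohash encodes 20 bits, 10 per axis, so the "drt5" cell spans 360/2^10 degrees of longitude and 180/2^10 degrees of latitude.

double lonStep = 360.0 / (1 << 10);        // 0.3515625
double latStep = 180.0 / (1 << 10);        // 0.17578125
System.out.println(-71.71875 + lonStep);   // -71.3671875  (left edge + width  = right edge)
System.out.println(42.71484375 + latStep); //  42.890625   (bottom edge + height = top edge)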
    private static void assertNormalizedPoint(GeoPoint input, GeoPoint expected) {
        GeoUtils.normalizePoint(input);
        if (Double.isNaN(expected.lat())) {

View File

@@ -18,14 +18,8 @@
 */
package org.elasticsearch.search.aggregations.bucket;

-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.Map;
-import java.util.function.Function;
-import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
+import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
@@ -35,6 +29,12 @@ import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.bucket.range.Range;
import org.elasticsearch.test.ESIntegTestCase;

+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Map;
+import java.util.function.Function;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
import static org.hamcrest.Matchers.containsString;
@@ -91,16 +91,19 @@ public class IpRangeIT extends ESIntegTestCase {
        Range.Bucket bucket1 = range.getBuckets().get(0);
        assertNull(bucket1.getFrom());
        assertEquals("192.168.1.0", bucket1.getTo());
+       assertEquals("*-192.168.1.0", bucket1.getKey());
        assertEquals(0, bucket1.getDocCount());

        Range.Bucket bucket2 = range.getBuckets().get(1);
        assertEquals("192.168.1.0", bucket2.getFrom());
        assertEquals("192.168.1.10", bucket2.getTo());
+       assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
        assertEquals(1, bucket2.getDocCount());

        Range.Bucket bucket3 = range.getBuckets().get(2);
        assertEquals("192.168.1.10", bucket3.getFrom());
        assertNull(bucket3.getTo());
+       assertEquals("192.168.1.10-*", bucket3.getKey());
        assertEquals(2, bucket3.getDocCount());
    }
@@ -118,16 +121,19 @@
        Range.Bucket bucket1 = range.getBuckets().get(0);
        assertNull(bucket1.getFrom());
        assertEquals("192.168.1.0", bucket1.getTo());
+       assertEquals("*-192.168.1.0", bucket1.getKey());
        assertEquals(1, bucket1.getDocCount());

        Range.Bucket bucket2 = range.getBuckets().get(1);
        assertEquals("192.168.1.0", bucket2.getFrom());
        assertEquals("192.168.1.10", bucket2.getTo());
+       assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
        assertEquals(1, bucket2.getDocCount());

        Range.Bucket bucket3 = range.getBuckets().get(2);
        assertEquals("192.168.1.10", bucket3.getFrom());
        assertNull(bucket3.getTo());
+       assertEquals("192.168.1.10-*", bucket3.getKey());
        assertEquals(2, bucket3.getDocCount());
    }
@@ -169,16 +175,19 @@
        Range.Bucket bucket1 = range.getBuckets().get(0);
        assertNull(bucket1.getFrom());
        assertEquals("192.168.1.0", bucket1.getTo());
+       assertEquals("*-192.168.1.0", bucket1.getKey());
        assertEquals(0, bucket1.getDocCount());

        Range.Bucket bucket2 = range.getBuckets().get(1);
        assertEquals("192.168.1.0", bucket2.getFrom());
        assertEquals("192.168.1.10", bucket2.getTo());
+       assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
        assertEquals(1, bucket2.getDocCount());

        Range.Bucket bucket3 = range.getBuckets().get(2);
        assertEquals("192.168.1.10", bucket3.getFrom());
        assertNull(bucket3.getTo());
+       assertEquals("192.168.1.10-*", bucket3.getKey());
        assertEquals(2, bucket3.getDocCount());
    }
@@ -196,16 +205,19 @@
        Range.Bucket bucket1 = range.getBuckets().get(0);
        assertNull(bucket1.getFrom());
        assertEquals("192.168.1.0", bucket1.getTo());
+       assertEquals("*-192.168.1.0", bucket1.getKey());
        assertEquals(0, bucket1.getDocCount());

        Range.Bucket bucket2 = range.getBuckets().get(1);
        assertEquals("192.168.1.0", bucket2.getFrom());
        assertEquals("192.168.1.10", bucket2.getTo());
+       assertEquals("192.168.1.0-192.168.1.10", bucket2.getKey());
        assertEquals(0, bucket2.getDocCount());

        Range.Bucket bucket3 = range.getBuckets().get(2);
        assertEquals("192.168.1.10", bucket3.getFrom());
        assertNull(bucket3.getTo());
+       assertEquals("192.168.1.10-*", bucket3.getKey());
        assertEquals(0, bucket3.getDocCount());
    }
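
The new getKey() assertions pin down the default key format for unkeyed ip_range buckets: an open bound renders as `*`, and the key is `from-to`. A sketch of that rule (hypothetical helper name, not an Elasticsearch method):

static String rangeKey(String from, String to) {
    return (from == null ? "*" : from) + "-" + (to == null ? "*" : to);
}
// rangeKey(null, "192.168.1.0")           -> "*-192.168.1.0"
// rangeKey("192.168.1.0", "192.168.1.10") -> "192.168.1.0-192.168.1.10"
// rangeKey("192.168.1.10", null)          -> "192.168.1.10-*"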

View File

@@ -157,4 +157,15 @@ public class InternalBinaryRangeTests extends InternalRangeTestCase<InternalBina
        }
        return new InternalBinaryRange(name, format, keyed, buckets, pipelineAggregators, metaData);
    }
/**
* Checks the invariant that bucket keys are always non-null, even if null keys
* were originally provided.
*/
public void testKeyGeneration() {
InternalBinaryRange range = createTestInstance();
for (InternalBinaryRange.Bucket bucket : range.getBuckets()) {
assertNotNull(bucket.getKey());
}
}
}

View File

@@ -36,6 +36,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.mocksocket.MockServerSocket;
import org.elasticsearch.mocksocket.MockSocket;
+import org.elasticsearch.common.concurrent.CompletableContext;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.BufferedInputStream;
@@ -50,7 +51,6 @@ import java.net.SocketException;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
-import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.Executor;
@@ -218,7 +218,7 @@ public class MockTcpTransport extends TcpTransport {
        private final Socket activeChannel;
        private final String profile;
        private final CancellableThreads cancellableThreads = new CancellableThreads();
-       private final CompletableFuture<Void> closeFuture = new CompletableFuture<>();
+       private final CompletableContext<Void> closeFuture = new CompletableContext<>();

        /**
         * Constructs a new MockChannel instance intended for handling the actual incoming / outgoing traffic.
@@ -364,7 +364,7 @@ public class MockTcpTransport extends TcpTransport {
        @Override
        public void addCloseListener(ActionListener<Void> listener) {
-           closeFuture.whenComplete(ActionListener.toBiConsumer(listener));
+           closeFuture.addListener(ActionListener.toBiConsumer(listener));
        }

        @Override
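
The switch from CompletableFuture to CompletableContext narrows what close listeners can observe from Throwable to Exception. A minimal sketch of that idea under those assumptions (not the actual Elasticsearch class):

import java.util.concurrent.CompletableFuture;
import java.util.function.BiConsumer;

public class SimpleCompletableContext<T> {
    private final CompletableFuture<T> future = new CompletableFuture<>();

    public void addListener(BiConsumer<T, Exception> listener) {
        future.whenComplete((value, throwable) -> {
            if (throwable == null) {
                listener.accept(value, null);
            } else if (throwable instanceof Exception) {
                listener.accept(null, (Exception) throwable);
            } else {
                // assumption: anything that is not an Exception is an Error
                // and should propagate rather than reach listeners
                throw (Error) throwable;
            }
        });
    }

    public boolean complete(T value) { return future.complete(value); }
    public boolean completeExceptionally(Exception e) { return future.completeExceptionally(e); }
}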

View File

@@ -2,7 +2,6 @@
    "index_patterns": [ ".watcher-history-${xpack.watcher.template.version}*" ],
    "order": 2147483647,
    "settings": {
-       "xpack.watcher.template.version": "${xpack.watcher.template.version}",
        "index.number_of_shards": 1,
        "index.number_of_replicas": 0,
        "index.auto_expand_replicas": "0-1",
@@ -10,6 +9,9 @@
    },
    "mappings": {
        "doc": {
+           "_meta": {
+               "watcher-history-version": "${xpack.watcher.template.version}"
+           },
            "dynamic_templates": [
                {
                    "disabled_payload_fields": {

View File

@@ -410,7 +410,7 @@ public class Security extends Plugin implements ActionPlugin, IngestPlugin, Netw
        final NativeRoleMappingStore nativeRoleMappingStore = new NativeRoleMappingStore(settings, client, securityIndex.get());
        final AnonymousUser anonymousUser = new AnonymousUser(settings);
        final ReservedRealm reservedRealm = new ReservedRealm(env, settings, nativeUsersStore,
-           anonymousUser, securityIndex.get(), threadPool.getThreadContext());
+           anonymousUser, securityIndex.get(), threadPool);
        Map<String, Realm.Factory> realmFactories = new HashMap<>(InternalRealms.getFactories(threadPool, resourceWatcherService,
            getSslService(), nativeUsersStore, nativeRoleMappingStore, securityIndex.get()));
        for (SecurityExtension extension : securityExtensions) {

View File

@@ -93,9 +93,9 @@ public final class InternalRealms {
                                                 SecurityIndexManager securityIndex) {
        Map<String, Realm.Factory> map = new HashMap<>();
-       map.put(FileRealmSettings.TYPE, config -> new FileRealm(config, resourceWatcherService));
+       map.put(FileRealmSettings.TYPE, config -> new FileRealm(config, resourceWatcherService, threadPool));
        map.put(NativeRealmSettings.TYPE, config -> {
-           final NativeRealm nativeRealm = new NativeRealm(config, nativeUsersStore);
+           final NativeRealm nativeRealm = new NativeRealm(config, nativeUsersStore, threadPool);
            securityIndex.addIndexStateListener(nativeRealm::onSecurityIndexStateChange);
            return nativeRealm;
        });

View File

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.security.authc.esnative;

import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings;
@@ -24,8 +25,8 @@ public class NativeRealm extends CachingUsernamePasswordRealm {

    private final NativeUsersStore userStore;

-   public NativeRealm(RealmConfig config, NativeUsersStore usersStore) {
-       super(NativeRealmSettings.TYPE, config);
+   public NativeRealm(RealmConfig config, NativeUsersStore usersStore, ThreadPool threadPool) {
+       super(NativeRealmSettings.TYPE, config, threadPool);
        this.userStore = usersStore;
    }

View File

@@ -14,8 +14,8 @@ import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
+import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.security.SecurityField;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
@@ -66,8 +66,8 @@ public class ReservedRealm extends CachingUsernamePasswordRealm {

    private final SecurityIndexManager securityIndex;

-   public ReservedRealm(Environment env, Settings settings, NativeUsersStore nativeUsersStore, AnonymousUser anonymousUser,
-                        SecurityIndexManager securityIndex, ThreadContext threadContext) {
-       super(TYPE, new RealmConfig(TYPE, Settings.EMPTY, settings, env, threadContext));
+   public ReservedRealm(Environment env, Settings settings, NativeUsersStore nativeUsersStore, AnonymousUser anonymousUser,
+                        SecurityIndexManager securityIndex, ThreadPool threadPool) {
+       super(TYPE, new RealmConfig(TYPE, Settings.EMPTY, settings, env, threadPool.getThreadContext()), threadPool);
        this.nativeUsersStore = nativeUsersStore;
        this.realmEnabled = XPackSettings.RESERVED_REALM_ENABLED_SETTING.get(settings);
        this.anonymousUser = anonymousUser;

View File

@@ -6,6 +6,7 @@
package org.elasticsearch.xpack.security.authc.file;

import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
@@ -21,13 +22,13 @@ public class FileRealm extends CachingUsernamePasswordRealm {
    private final FileUserPasswdStore userPasswdStore;
    private final FileUserRolesStore userRolesStore;

-   public FileRealm(RealmConfig config, ResourceWatcherService watcherService) {
-       this(config, new FileUserPasswdStore(config, watcherService), new FileUserRolesStore(config, watcherService));
+   public FileRealm(RealmConfig config, ResourceWatcherService watcherService, ThreadPool threadPool) {
+       this(config, new FileUserPasswdStore(config, watcherService), new FileUserRolesStore(config, watcherService), threadPool);
    }

    // pkg private for testing
-   FileRealm(RealmConfig config, FileUserPasswdStore userPasswdStore, FileUserRolesStore userRolesStore) {
-       super(FileRealmSettings.TYPE, config);
+   FileRealm(RealmConfig config, FileUserPasswdStore userPasswdStore, FileUserRolesStore userRolesStore, ThreadPool threadPool) {
+       super(FileRealmSettings.TYPE, config, threadPool);
        this.userPasswdStore = userPasswdStore;
        userPasswdStore.addListener(this::expireAll);
        this.userRolesStore = userRolesStore;

View File

@@ -67,7 +67,7 @@ public final class LdapRealm extends CachingUsernamePasswordRealm {
    // pkg private for testing
    LdapRealm(String type, RealmConfig config, SessionFactory sessionFactory,
              UserRoleMapper roleMapper, ThreadPool threadPool) {
-       super(type, config);
+       super(type, config, threadPool);
        this.sessionFactory = sessionFactory;
        this.roleMapper = roleMapper;
        this.threadPool = threadPool;

View File

@@ -5,11 +5,15 @@
 */
package org.elasticsearch.xpack.security.authc.support;

+import org.apache.lucene.util.SetOnce;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.common.cache.Cache;
import org.elasticsearch.common.cache.CacheBuilder;
+import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.common.util.concurrent.ListenableFuture;
+import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.authc.AuthenticationToken;
import org.elasticsearch.xpack.core.security.authc.RealmConfig;
@@ -21,18 +25,21 @@ import org.elasticsearch.xpack.core.security.user.User;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.ExecutionException;
+import java.util.concurrent.atomic.AtomicBoolean;

public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm implements CachingRealm {

-   private final Cache<String, UserWithHash> cache;
+   private final Cache<String, ListenableFuture<Tuple<AuthenticationResult, UserWithHash>>> cache;
+   private final ThreadPool threadPool;
    final Hasher hasher;

-   protected CachingUsernamePasswordRealm(String type, RealmConfig config) {
+   protected CachingUsernamePasswordRealm(String type, RealmConfig config, ThreadPool threadPool) {
        super(type, config);
        hasher = Hasher.resolve(CachingUsernamePasswordRealmSettings.CACHE_HASH_ALGO_SETTING.get(config.settings()), Hasher.SSHA256);
+       this.threadPool = threadPool;
        TimeValue ttl = CachingUsernamePasswordRealmSettings.CACHE_TTL_SETTING.get(config.settings());
        if (ttl.getNanos() > 0) {
-           cache = CacheBuilder.<String, UserWithHash>builder()
+           cache = CacheBuilder.<String, ListenableFuture<Tuple<AuthenticationResult, UserWithHash>>>builder()
                .setExpireAfterWrite(ttl)
                .setMaximumWeight(CachingUsernamePasswordRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings()))
                .build();
@@ -78,74 +85,95 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
    }

    private void authenticateWithCache(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
-       UserWithHash userWithHash = cache.get(token.principal());
-       if (userWithHash == null) {
-           if (logger.isDebugEnabled()) {
-               logger.debug("user [{}] not found in cache for realm [{}], proceeding with normal authentication",
-                   token.principal(), name());
-           }
-           doAuthenticateAndCache(token, ActionListener.wrap((result) -> {
-               if (result.isAuthenticated()) {
-                   final User user = result.getUser();
-                   logger.debug("realm [{}] authenticated user [{}], with roles [{}]", name(), token.principal(), user.roles());
-               }
-               listener.onResponse(result);
-           }, listener::onFailure));
-       } else if (userWithHash.hasHash()) {
-           if (userWithHash.verify(token.credentials())) {
-               if (userWithHash.user.enabled()) {
-                   User user = userWithHash.user;
-                   logger.debug("realm [{}] authenticated user [{}], with roles [{}]", name(), token.principal(), user.roles());
-                   listener.onResponse(AuthenticationResult.success(user));
-               } else {
-                   // We successfully authenticated, but the cached user is disabled.
-                   // Reload the primary record to check whether the user is still disabled
-                   cache.invalidate(token.principal());
-                   doAuthenticateAndCache(token, ActionListener.wrap((result) -> {
-                       if (result.isAuthenticated()) {
-                           final User user = result.getUser();
-                           logger.debug("realm [{}] authenticated user [{}] (enabled:{}), with roles [{}]", name(), token.principal(),
-                               user.enabled(), user.roles());
-                       }
-                       listener.onResponse(result);
-                   }, listener::onFailure));
-               }
-           } else {
-               cache.invalidate(token.principal());
-               doAuthenticateAndCache(token, ActionListener.wrap((result) -> {
-                   if (result.isAuthenticated()) {
-                       final User user = result.getUser();
-                       logger.debug("cached user's password changed. realm [{}] authenticated user [{}], with roles [{}]",
-                           name(), token.principal(), user.roles());
-                   }
-                   listener.onResponse(result);
-               }, listener::onFailure));
-           }
-       } else {
-           cache.invalidate(token.principal());
-           doAuthenticateAndCache(token, ActionListener.wrap((result) -> {
-               if (result.isAuthenticated()) {
-                   final User user = result.getUser();
-                   logger.debug("cached user came from a lookup and could not be used for authentication. " +
-                       "realm [{}] authenticated user [{}] with roles [{}]", name(), token.principal(), user.roles());
-               }
-               listener.onResponse(result);
-           }, listener::onFailure));
-       }
-   }
-
-   private void doAuthenticateAndCache(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
-       ActionListener<AuthenticationResult> wrapped = ActionListener.wrap((result) -> {
-           Objects.requireNonNull(result, "AuthenticationResult cannot be null");
-           if (result.getStatus() == AuthenticationResult.Status.SUCCESS) {
-               UserWithHash userWithHash = new UserWithHash(result.getUser(), token.credentials(), hasher);
-               // it doesn't matter if we already computed it elsewhere
-               cache.put(token.principal(), userWithHash);
-           }
-           listener.onResponse(result);
-       }, listener::onFailure);
-       doAuthenticate(token, wrapped);
-   }
+       try {
+           final SetOnce<User> authenticatedUser = new SetOnce<>();
+           final AtomicBoolean createdAndStartedFuture = new AtomicBoolean(false);
+           final ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future = cache.computeIfAbsent(token.principal(), k -> {
+               final ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> created = new ListenableFuture<>();
+               if (createdAndStartedFuture.compareAndSet(false, true) == false) {
+                   throw new IllegalStateException("something else already started this. how?");
+               }
+               return created;
+           });
+           if (createdAndStartedFuture.get()) {
+               doAuthenticate(token, ActionListener.wrap(result -> {
+                   if (result.isAuthenticated()) {
+                       final User user = result.getUser();
+                       authenticatedUser.set(user);
+                       final UserWithHash userWithHash = new UserWithHash(user, token.credentials(), hasher);
+                       future.onResponse(new Tuple<>(result, userWithHash));
+                   } else {
+                       future.onResponse(new Tuple<>(result, null));
+                   }
+               }, future::onFailure));
+           }
+
+           future.addListener(ActionListener.wrap(tuple -> {
+               if (tuple != null) {
+                   final UserWithHash userWithHash = tuple.v2();
+                   final boolean performedAuthentication = createdAndStartedFuture.get() && userWithHash != null &&
+                       tuple.v2().user == authenticatedUser.get();
+                   handleResult(future, createdAndStartedFuture.get(), performedAuthentication, token, tuple, listener);
+               } else {
+                   handleFailure(future, createdAndStartedFuture.get(), token, new IllegalStateException("unknown error authenticating"),
+                       listener);
+               }
+           }, e -> handleFailure(future, createdAndStartedFuture.get(), token, e, listener)),
+               threadPool.executor(ThreadPool.Names.GENERIC));
+       } catch (ExecutionException e) {
+           listener.onResponse(AuthenticationResult.unsuccessful("", e));
+       }
+   }
+
+   private void handleResult(ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future, boolean createdAndStartedFuture,
+                             boolean performedAuthentication, UsernamePasswordToken token,
+                             Tuple<AuthenticationResult, UserWithHash> result, ActionListener<AuthenticationResult> listener) {
+       final AuthenticationResult authResult = result.v1();
+       if (authResult == null) {
+           // this was from a lookup; clear and redo
+           cache.invalidate(token.principal(), future);
+           authenticateWithCache(token, listener);
+       } else if (authResult.isAuthenticated()) {
+           if (performedAuthentication) {
+               listener.onResponse(authResult);
+           } else {
+               UserWithHash userWithHash = result.v2();
+               if (userWithHash.verify(token.credentials())) {
+                   if (userWithHash.user.enabled()) {
+                       User user = userWithHash.user;
+                       logger.debug("realm [{}] authenticated user [{}], with roles [{}]",
+                           name(), token.principal(), user.roles());
+                       listener.onResponse(AuthenticationResult.success(user));
+                   } else {
+                       // re-auth to see if user has been enabled
+                       cache.invalidate(token.principal(), future);
+                       authenticateWithCache(token, listener);
+                   }
+               } else {
+                   // could be a password change?
+                   cache.invalidate(token.principal(), future);
+                   authenticateWithCache(token, listener);
+               }
+           }
+       } else {
+           cache.invalidate(token.principal(), future);
+           if (createdAndStartedFuture) {
+               listener.onResponse(authResult);
+           } else {
+               authenticateWithCache(token, listener);
+           }
+       }
+   }
+
+   private void handleFailure(ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future, boolean createdAndStarted,
+                              UsernamePasswordToken token, Exception e, ActionListener<AuthenticationResult> listener) {
+       cache.invalidate(token.principal(), future);
+       if (createdAndStarted) {
+           listener.onFailure(e);
+       } else {
+           authenticateWithCache(token, listener);
+       }
+   }

    @Override
@@ -160,29 +188,34 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
    @Override
    public final void lookupUser(String username, ActionListener<User> listener) {
        if (cache != null) {
-           UserWithHash withHash = cache.get(username);
-           if (withHash == null) {
-               try {
-                   doLookupUser(username, ActionListener.wrap((user) -> {
-                       Runnable action = () -> listener.onResponse(null);
-                       if (user != null) {
-                           UserWithHash userWithHash = new UserWithHash(user, null, null);
-                           try {
-                               // computeIfAbsent is used here to avoid overwriting a value from a concurrent authenticate call as it
-                               // contains the password hash, which provides a performance boost and we shouldn't just erase that
-                               cache.computeIfAbsent(username, (n) -> userWithHash);
-                               action = () -> listener.onResponse(userWithHash.user);
-                           } catch (ExecutionException e) {
-                               action = () -> listener.onFailure(e);
-                           }
-                       }
-                       action.run();
-                   }, listener::onFailure));
-               } catch (Exception e) {
-                   listener.onFailure(e);
-               }
-           } else {
-               listener.onResponse(withHash.user);
-           }
+           try {
+               ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> future = cache.computeIfAbsent(username, key -> {
+                   ListenableFuture<Tuple<AuthenticationResult, UserWithHash>> created = new ListenableFuture<>();
+                   doLookupUser(username, ActionListener.wrap(user -> {
+                       if (user != null) {
+                           UserWithHash userWithHash = new UserWithHash(user, null, null);
+                           created.onResponse(new Tuple<>(null, userWithHash));
+                       } else {
+                           created.onResponse(new Tuple<>(null, null));
+                       }
+                   }, created::onFailure));
+                   return created;
+               });
+
+               future.addListener(ActionListener.wrap(tuple -> {
+                   if (tuple != null) {
+                       if (tuple.v2() == null) {
+                           cache.invalidate(username, future);
+                           listener.onResponse(null);
+                       } else {
+                           listener.onResponse(tuple.v2().user);
+                       }
+                   } else {
+                       listener.onResponse(null);
+                   }
+               }, listener::onFailure), threadPool.executor(ThreadPool.Names.GENERIC));
+           } catch (ExecutionException e) {
+               listener.onFailure(e);
+           }
        } else {
            doLookupUser(username, listener);
@@ -192,12 +225,12 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
    protected abstract void doLookupUser(String username, ActionListener<User> listener);

    private static class UserWithHash {
-       User user;
-       char[] hash;
-       Hasher hasher;
+       final User user;
+       final char[] hash;
+       final Hasher hasher;

        UserWithHash(User user, SecureString password, Hasher hasher) {
-           this.user = user;
+           this.user = Objects.requireNonNull(user);
            this.hash = password == null ? null : hasher.hash(password);
            this.hasher = hasher;
        }
@@ -205,9 +238,5 @@ public abstract class CachingUsernamePasswordRealm extends UsernamePasswordRealm
        boolean verify(SecureString password) {
            return hash != null && hasher.verify(password, hash);
        }
-
-       boolean hasHash() {
-           return hash != null;
-       }
    }
}
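
The heart of this change is a "single-flight" cache: the cache now stores the future of an authentication attempt rather than its result, so concurrent requests for the same principal share one expensive hash verification instead of each running their own. A minimal sketch of the pattern with assumed names (plain java.util.concurrent, not the Elasticsearch classes):

import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.function.BiConsumer;

public class SingleFlightCache<K, V> {
    private final ConcurrentMap<K, CompletableFuture<V>> cache = new ConcurrentHashMap<>();

    /** Runs loader at most once per key; concurrent callers wait on the same future. */
    public void get(K key, BiConsumer<K, CompletableFuture<V>> loader, BiConsumer<V, Throwable> listener) {
        CompletableFuture<V> created = new CompletableFuture<>();
        CompletableFuture<V> existing = cache.putIfAbsent(key, created);
        if (existing == null) {
            loader.accept(key, created); // we won the race: start the real work
            // mirror the invalidate-on-failure above so a failed attempt is retried
            created.whenComplete((v, t) -> { if (t != null) cache.remove(key, created); });
            existing = created;
        }
        existing.whenComplete(listener);
    }
}

Keeping the future itself as the cache value is what lets later callers observe either the in-flight computation or the finished result without a second lookup race.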

View File

@@ -14,11 +14,14 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.AliasOrIndex;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
@@ -35,6 +38,7 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import java.util.SortedMap;
import java.util.concurrent.CopyOnWriteArraySet;
@@ -42,7 +46,7 @@ import java.util.stream.Collectors;

import static org.elasticsearch.xpack.core.security.authz.IndicesAndAliasesResolverField.NO_INDEX_PLACEHOLDER;

-public class IndicesAndAliasesResolver {
+class IndicesAndAliasesResolver {

    //`*,-*` what we replace indices with if we need Elasticsearch to return empty responses without throwing exception
    private static final String[] NO_INDICES_ARRAY = new String[] { "*", "-*" };
@@ -51,7 +55,7 @@ public class IndicesAndAliasesResolver {
    private final IndexNameExpressionResolver nameExpressionResolver;
    private final RemoteClusterResolver remoteClusterResolver;

-   public IndicesAndAliasesResolver(Settings settings, ClusterService clusterService) {
+   IndicesAndAliasesResolver(Settings settings, ClusterService clusterService) {
        this.nameExpressionResolver = new IndexNameExpressionResolver(settings);
        this.remoteClusterResolver = new RemoteClusterResolver(settings, clusterService.getClusterSettings());
    }
@@ -85,7 +89,7 @@
     * Otherwise, <em>N</em> will be added to the <em>local</em> index list.
     */
-   public ResolvedIndices resolve(TransportRequest request, MetaData metaData, AuthorizedIndices authorizedIndices) {
+   ResolvedIndices resolve(TransportRequest request, MetaData metaData, AuthorizedIndices authorizedIndices) {
        if (request instanceof IndicesAliasesRequest) {
            ResolvedIndices.Builder resolvedIndicesBuilder = new ResolvedIndices.Builder();
            IndicesAliasesRequest indicesAliasesRequest = (IndicesAliasesRequest) request;
@@ -116,7 +120,7 @@
             */
            assert indicesRequest.indices() == null || indicesRequest.indices().length == 0
                : "indices are: " + Arrays.toString(indicesRequest.indices()); // Arrays.toString() can handle null values - all good
-           resolvedIndicesBuilder.addLocal(((PutMappingRequest) indicesRequest).getConcreteIndex().getName());
+           resolvedIndicesBuilder.addLocal(getPutMappingIndexOrAlias((PutMappingRequest) indicesRequest, authorizedIndices, metaData));
        } else if (indicesRequest instanceof IndicesRequest.Replaceable) {
            IndicesRequest.Replaceable replaceable = (IndicesRequest.Replaceable) indicesRequest;
            final boolean replaceWildcards = indicesRequest.indicesOptions().expandWildcardsOpen()
@@ -213,7 +217,48 @@
        return resolvedIndicesBuilder.build();
    }

-   public static boolean allowsRemoteIndices(IndicesRequest request) {
+   /**
* Special handling of the value to authorize for a put mapping request. Dynamic put mapping
* requests use a concrete index, but we allow permissions to be defined on aliases so if the
* request's concrete index is not in the list of authorized indices, then we need to look to
* see if this can be authorized against an alias
*/
static String getPutMappingIndexOrAlias(PutMappingRequest request, AuthorizedIndices authorizedIndices, MetaData metaData) {
final String concreteIndexName = request.getConcreteIndex().getName();
final List<String> authorizedIndicesList = authorizedIndices.get();
// validate that the concrete index exists, otherwise there is no remapping that we could do
final AliasOrIndex aliasOrIndex = metaData.getAliasAndIndexLookup().get(concreteIndexName);
final String resolvedAliasOrIndex;
if (aliasOrIndex == null) {
resolvedAliasOrIndex = concreteIndexName;
} else if (aliasOrIndex.isAlias()) {
throw new IllegalStateException("concrete index [" + concreteIndexName + "] is an alias but should not be");
} else if (authorizedIndicesList.contains(concreteIndexName)) {
// user is authorized to put mappings for this index
resolvedAliasOrIndex = concreteIndexName;
} else {
// the user is not authorized to put mappings for this index, but could have been
// authorized for a write using an alias that triggered a dynamic mapping update
ImmutableOpenMap<String, List<AliasMetaData>> foundAliases =
metaData.findAliases(Strings.EMPTY_ARRAY, new String[] { concreteIndexName });
List<AliasMetaData> aliasMetaData = foundAliases.get(concreteIndexName);
if (aliasMetaData != null) {
Optional<String> foundAlias = aliasMetaData.stream()
.map(AliasMetaData::alias)
.filter(authorizedIndicesList::contains)
.filter(aliasName -> metaData.getAliasAndIndexLookup().get(aliasName).getIndices().size() == 1)
.findFirst();
resolvedAliasOrIndex = foundAlias.orElse(concreteIndexName);
} else {
resolvedAliasOrIndex = concreteIndexName;
}
}
return resolvedAliasOrIndex;
}
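
A simplified, hypothetical model of the fallback above (my names, not the ES API): prefer the concrete index when it is authorized, otherwise fall back to an authorized alias that points at exactly that one index.

import java.util.List;
import java.util.Map;
import java.util.Set;

class PutMappingAuthzModel {
    static String resolve(String concreteIndex, Set<String> authorized, Map<String, List<String>> aliasToIndices) {
        if (authorized.contains(concreteIndex)) {
            return concreteIndex; // the simple, common case
        }
        // otherwise look for a single-index alias the user is authorized for
        return aliasToIndices.entrySet().stream()
            .filter(e -> e.getValue().size() == 1 && e.getValue().contains(concreteIndex))
            .map(Map.Entry::getKey)
            .filter(authorized::contains)
            .findFirst()
            .orElse(concreteIndex);
    }
}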
static boolean allowsRemoteIndices(IndicesRequest request) {
        return request instanceof SearchRequest || request instanceof FieldCapabilitiesRequest
            || request instanceof GraphExploreRequest;
    }

View File

@@ -159,7 +159,7 @@ public final class SSLChannelContext extends SocketChannelContext {
    private static class CloseNotifyOperation implements WriteOperation {

-       private static final BiConsumer<Void, Throwable> LISTENER = (v, t) -> {};
+       private static final BiConsumer<Void, Exception> LISTENER = (v, t) -> {};
        private static final Object WRITE_OBJECT = new Object();
        private final SocketChannelContext channelContext;
@@ -168,7 +168,7 @@
        }

        @Override
-       public BiConsumer<Void, Throwable> getListener() {
+       public BiConsumer<Void, Exception> getListener() {
            return LISTENER;
        }

View File

@@ -13,9 +13,9 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest;
@@ -28,6 +28,7 @@ import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealm;
import org.elasticsearch.xpack.security.authc.esnative.ReservedRealmTests;
import org.elasticsearch.xpack.security.support.SecurityIndexManager;
+import org.junit.After;
import org.junit.Before;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -62,6 +63,7 @@ public class TransportGetUsersActionTests extends ESTestCase {
    private boolean anonymousEnabled;
    private Settings settings;
+   private ThreadPool threadPool;

    @Before
    public void maybeEnableAnonymous() {
@@ -71,6 +73,14 @@
        } else {
            settings = Settings.EMPTY;
        }
+       threadPool = new TestThreadPool("TransportGetUsersActionTests");
+   }
+
+   @After
+   public void terminateThreadPool() throws InterruptedException {
+       if (threadPool != null) {
+           terminate(threadPool);
+       }
    }

    public void testAnonymousUser() {
@@ -79,10 +89,10 @@
        when(securityIndex.isAvailable()).thenReturn(true);
        AnonymousUser anonymousUser = new AnonymousUser(settings);
        ReservedRealm reservedRealm =
-           new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, securityIndex, new ThreadContext(Settings.EMPTY));
+           new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, securityIndex, threadPool);
        TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            x -> null, null, Collections.emptySet());
-       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),
+       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class),
            mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm);

        GetUsersRequest request = new GetUsersRequest();
@@ -117,7 +127,7 @@
        NativeUsersStore usersStore = mock(NativeUsersStore.class);
        TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            x -> null, null, Collections.emptySet());
-       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),
+       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class),
            mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class));

        GetUsersRequest request = new GetUsersRequest();
@@ -151,7 +161,7 @@
        ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap());
        ReservedRealm reservedRealm =
-           new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), securityIndex, new ThreadContext(Settings.EMPTY));
+           new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), securityIndex, threadPool);
        PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>();
        reservedRealm.users(userFuture);
        final Collection<User> allReservedUsers = userFuture.actionGet();
@@ -160,7 +170,7 @@
        final List<String> names = reservedUsers.stream().map(User::principal).collect(Collectors.toList());
        TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            x -> null, null, Collections.emptySet());
-       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),
+       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class),
            mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm);

        logger.error("names {}", names);
@@ -197,10 +207,10 @@
        when(securityIndex.isAvailable()).thenReturn(true);
        ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap());
-       ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings),
-           securityIndex, new ThreadContext(Settings.EMPTY));
+       ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings),
+           securityIndex, threadPool);
        TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            x -> null, null, Collections.emptySet());
-       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),
+       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class),
            mock(IndexNameExpressionResolver.class), usersStore, transportService, reservedRealm);

        GetUsersRequest request = new GetUsersRequest();
@@ -247,7 +257,7 @@
        NativeUsersStore usersStore = mock(NativeUsersStore.class);
        TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            x -> null, null, Collections.emptySet());
-       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),
+       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class),
            mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class));

        GetUsersRequest request = new GetUsersRequest();
@@ -295,7 +305,7 @@
        NativeUsersStore usersStore = mock(NativeUsersStore.class);
        TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
            x -> null, null, Collections.emptySet());
-       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class),
+       TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class),
            mock(IndexNameExpressionResolver.class), usersStore, transportService, mock(ReservedRealm.class));

        GetUsersRequest request = new GetUsersRequest();
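
For reference, the lifecycle pattern the test now follows, sketched as an equivalent try/finally (TestThreadPool and the terminate helper are the utilities used in the diff; the inline form below is my own arrangement):

ThreadPool threadPool = new TestThreadPool("TransportGetUsersActionTests");
try {
    // ... hand threadPool to the realm and action under test ...
} finally {
    terminate(threadPool); // ESTestCase helper; blocks until the pool shuts down
}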

View File

@ -121,14 +121,16 @@ public class TransportPutUserActionTests extends ESTestCase {
when(securityIndex.isAvailable()).thenReturn(true); when(securityIndex.isAvailable()).thenReturn(true);
ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); ReservedRealmTests.mockGetAllReservedUserInfo(usersStore, Collections.emptyMap());
Settings settings = Settings.builder().put("path.home", createTempDir()).build(); Settings settings = Settings.builder().put("path.home", createTempDir()).build();
final ThreadPool threadPool = mock(ThreadPool.class);
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(settings));
ReservedRealm reservedRealm = new ReservedRealm(TestEnvironment.newEnvironment(settings), settings, usersStore, ReservedRealm reservedRealm = new ReservedRealm(TestEnvironment.newEnvironment(settings), settings, usersStore,
new AnonymousUser(settings), securityIndex, new ThreadContext(settings)); new AnonymousUser(settings), securityIndex, threadPool);
PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>(); PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>();
reservedRealm.users(userFuture); reservedRealm.users(userFuture);
final User reserved = randomFrom(userFuture.actionGet().toArray(new User[0])); final User reserved = randomFrom(userFuture.actionGet().toArray(new User[0]));
TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR,
x -> null, null, Collections.emptySet()); x -> null, null, Collections.emptySet());
TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, threadPool, mock(ActionFilters.class),
mock(IndexNameExpressionResolver.class), usersStore, transportService); mock(IndexNameExpressionResolver.class), usersStore, transportService);
PutUserRequest request = new PutUserRequest(); PutUserRequest request = new PutUserRequest();
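The recurring pattern in these test hunks: a mocked ThreadPool must still return a real ThreadContext, since the realm and transport-action code reads request headers through it. A minimal sketch of that setup, assuming Mockito; the helper name is illustrative, not part of the change:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.common.util.concurrent.ThreadContext;
    import org.elasticsearch.threadpool.ThreadPool;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    final class ThreadPoolStubs {
        // The mock only needs to answer getThreadContext(), but with a real
        // ThreadContext instance backed by the test's settings.
        static ThreadPool stubThreadPool(Settings settings) {
            ThreadPool threadPool = mock(ThreadPool.class);
            when(threadPool.getThreadContext()).thenReturn(new ThreadContext(settings));
            return threadPool;
        }
    }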
View File
@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmConfig;
import org.elasticsearch.xpack.security.support.SecurityIndexManager; import org.elasticsearch.xpack.security.support.SecurityIndexManager;
@ -18,6 +19,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.xpack.security.test.SecurityTestUtils.getClusterIndexHealth; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.getClusterIndexHealth;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class NativeRealmTests extends ESTestCase { public class NativeRealmTests extends ESTestCase {
@ -26,12 +28,15 @@ public class NativeRealmTests extends ESTestCase {
} }
public void testCacheClearOnIndexHealthChange() { public void testCacheClearOnIndexHealthChange() {
final ThreadPool threadPool = mock(ThreadPool.class);
final ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
when(threadPool.getThreadContext()).thenReturn(threadContext);
final AtomicInteger numInvalidation = new AtomicInteger(0); final AtomicInteger numInvalidation = new AtomicInteger(0);
int expectedInvalidation = 0; int expectedInvalidation = 0;
Settings settings = Settings.builder().put("path.home", createTempDir()).build(); Settings settings = Settings.builder().put("path.home", createTempDir()).build();
RealmConfig config = new RealmConfig("native", Settings.EMPTY, settings, TestEnvironment.newEnvironment(settings), RealmConfig config = new RealmConfig("native", Settings.EMPTY, settings, TestEnvironment.newEnvironment(settings),
new ThreadContext(settings)); new ThreadContext(settings));
final NativeRealm nativeRealm = new NativeRealm(config, mock(NativeUsersStore.class)) { final NativeRealm nativeRealm = new NativeRealm(config, mock(NativeUsersStore.class), threadPool) {
@Override @Override
void clearCache() { void clearCache() {
numInvalidation.incrementAndGet(); numInvalidation.incrementAndGet();
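The cache-clear test isolates the hook with a counting subclass; the core of that pattern, assuming the surrounding fixtures (config, threadPool, and a mocked NativeUsersStore) are in scope:

    import java.util.concurrent.atomic.AtomicInteger;

    // Override the hook under test and count invocations instead of doing
    // real work; assertions then compare the counter against the expected
    // number of cache invalidations.
    final AtomicInteger numInvalidation = new AtomicInteger(0);
    final NativeRealm nativeRealm = new NativeRealm(config, mock(NativeUsersStore.class), threadPool) {
        @Override
        void clearCache() {
            numInvalidation.incrementAndGet();
        }
    };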
View File
@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.Environment; import org.elasticsearch.env.Environment;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.XPackSettings;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm; import org.elasticsearch.xpack.core.security.authc.esnative.ClientReservedRealm;
@ -63,6 +64,7 @@ public class ReservedRealmTests extends ESTestCase {
private static final SecureString EMPTY_PASSWORD = new SecureString("".toCharArray()); private static final SecureString EMPTY_PASSWORD = new SecureString("".toCharArray());
private NativeUsersStore usersStore; private NativeUsersStore usersStore;
private SecurityIndexManager securityIndex; private SecurityIndexManager securityIndex;
private ThreadPool threadPool;
@Before @Before
public void setupMocks() throws Exception { public void setupMocks() throws Exception {
@ -71,6 +73,8 @@ public class ReservedRealmTests extends ESTestCase {
when(securityIndex.isAvailable()).thenReturn(true); when(securityIndex.isAvailable()).thenReturn(true);
when(securityIndex.checkMappingVersion(any())).thenReturn(true); when(securityIndex.checkMappingVersion(any())).thenReturn(true);
mockGetAllReservedUserInfo(usersStore, Collections.emptyMap()); mockGetAllReservedUserInfo(usersStore, Collections.emptyMap());
threadPool = mock(ThreadPool.class);
when(threadPool.getThreadContext()).thenReturn(new ThreadContext(Settings.EMPTY));
} }
public void testReservedUserEmptyPasswordAuthenticationFails() throws Throwable { public void testReservedUserEmptyPasswordAuthenticationFails() throws Throwable {
@ -78,7 +82,7 @@ public class ReservedRealmTests extends ESTestCase {
UsernamesField.BEATS_NAME); UsernamesField.BEATS_NAME);
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
@ -94,7 +98,7 @@ public class ReservedRealmTests extends ESTestCase {
} }
final ReservedRealm reservedRealm = final ReservedRealm reservedRealm =
new ReservedRealm(mock(Environment.class), settings, usersStore, new ReservedRealm(mock(Environment.class), settings, usersStore,
new AnonymousUser(settings), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(settings), securityIndex, threadPool);
final User expected = randomReservedUser(true); final User expected = randomReservedUser(true);
final String principal = expected.principal(); final String principal = expected.principal();
@ -116,7 +120,7 @@ public class ReservedRealmTests extends ESTestCase {
private void verifySuccessfulAuthentication(boolean enabled) throws Exception { private void verifySuccessfulAuthentication(boolean enabled) throws Exception {
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
final User expectedUser = randomReservedUser(enabled); final User expectedUser = randomReservedUser(enabled);
final String principal = expectedUser.principal(); final String principal = expectedUser.principal();
final SecureString newPassword = new SecureString("foobar".toCharArray()); final SecureString newPassword = new SecureString("foobar".toCharArray());
@ -157,7 +161,7 @@ public class ReservedRealmTests extends ESTestCase {
public void testLookup() throws Exception { public void testLookup() throws Exception {
final ReservedRealm reservedRealm = final ReservedRealm reservedRealm =
new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
final User expectedUser = randomReservedUser(true); final User expectedUser = randomReservedUser(true);
final String principal = expectedUser.principal(); final String principal = expectedUser.principal();
@ -182,7 +186,7 @@ public class ReservedRealmTests extends ESTestCase {
Settings settings = Settings.builder().put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build(); Settings settings = Settings.builder().put(XPackSettings.RESERVED_REALM_ENABLED_SETTING.getKey(), false).build();
final ReservedRealm reservedRealm = final ReservedRealm reservedRealm =
new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings), new ReservedRealm(mock(Environment.class), settings, usersStore, new AnonymousUser(settings),
securityIndex, new ThreadContext(Settings.EMPTY)); securityIndex, threadPool);
final User expectedUser = randomReservedUser(true); final User expectedUser = randomReservedUser(true);
final String principal = expectedUser.principal(); final String principal = expectedUser.principal();
@ -196,7 +200,7 @@ public class ReservedRealmTests extends ESTestCase {
public void testLookupThrows() throws Exception { public void testLookupThrows() throws Exception {
final ReservedRealm reservedRealm = final ReservedRealm reservedRealm =
new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
final User expectedUser = randomReservedUser(true); final User expectedUser = randomReservedUser(true);
final String principal = expectedUser.principal(); final String principal = expectedUser.principal();
when(securityIndex.indexExists()).thenReturn(true); when(securityIndex.indexExists()).thenReturn(true);
@ -243,7 +247,7 @@ public class ReservedRealmTests extends ESTestCase {
public void testGetUsers() { public void testGetUsers() {
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>(); PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>();
reservedRealm.users(userFuture); reservedRealm.users(userFuture);
assertThat(userFuture.actionGet(), assertThat(userFuture.actionGet(),
@ -258,7 +262,7 @@ public class ReservedRealmTests extends ESTestCase {
.build(); .build();
final AnonymousUser anonymousUser = new AnonymousUser(settings); final AnonymousUser anonymousUser = new AnonymousUser(settings);
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser,
securityIndex, new ThreadContext(Settings.EMPTY)); securityIndex, threadPool);
PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>(); PlainActionFuture<Collection<User>> userFuture = new PlainActionFuture<>();
reservedRealm.users(userFuture); reservedRealm.users(userFuture);
if (anonymousEnabled) { if (anonymousEnabled) {
@ -275,7 +279,7 @@ public class ReservedRealmTests extends ESTestCase {
ReservedUserInfo userInfo = new ReservedUserInfo(hash, true, false); ReservedUserInfo userInfo = new ReservedUserInfo(hash, true, false);
mockGetAllReservedUserInfo(usersStore, Collections.singletonMap("elastic", userInfo)); mockGetAllReservedUserInfo(usersStore, Collections.singletonMap("elastic", userInfo));
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), Settings.EMPTY, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
if (randomBoolean()) { if (randomBoolean()) {
PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
@ -305,7 +309,7 @@ public class ReservedRealmTests extends ESTestCase {
when(securityIndex.indexExists()).thenReturn(true); when(securityIndex.indexExists()).thenReturn(true);
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
doAnswer((i) -> { doAnswer((i) -> {
@ -327,7 +331,7 @@ public class ReservedRealmTests extends ESTestCase {
when(securityIndex.indexExists()).thenReturn(true); when(securityIndex.indexExists()).thenReturn(true);
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
SecureString password = new SecureString("password".toCharArray()); SecureString password = new SecureString("password".toCharArray());
doAnswer((i) -> { doAnswer((i) -> {
@ -354,7 +358,7 @@ public class ReservedRealmTests extends ESTestCase {
when(securityIndex.indexExists()).thenReturn(false); when(securityIndex.indexExists()).thenReturn(false);
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
reservedRealm.doAuthenticate(new UsernamePasswordToken(new ElasticUser(true).principal(), reservedRealm.doAuthenticate(new UsernamePasswordToken(new ElasticUser(true).principal(),
@ -372,7 +376,7 @@ public class ReservedRealmTests extends ESTestCase {
when(securityIndex.indexExists()).thenReturn(true); when(securityIndex.indexExists()).thenReturn(true);
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
@ -394,7 +398,7 @@ public class ReservedRealmTests extends ESTestCase {
when(securityIndex.indexExists()).thenReturn(false); when(securityIndex.indexExists()).thenReturn(false);
final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore, final ReservedRealm reservedRealm = new ReservedRealm(mock(Environment.class), settings, usersStore,
new AnonymousUser(Settings.EMPTY), securityIndex, new ThreadContext(Settings.EMPTY)); new AnonymousUser(Settings.EMPTY), securityIndex, threadPool);
PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> listener = new PlainActionFuture<>();
final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME); final String principal = randomFrom(KibanaUser.NAME, LogstashSystemUser.NAME, BeatsSystemUser.NAME);
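Every assertion in this file drives the async realm API synchronously through PlainActionFuture, which is both an ActionListener and a Future. The pattern in isolation, with token and realm as in the tests above:

    import org.elasticsearch.action.support.PlainActionFuture;

    // Passed to the async API as the listener; actionGet() then blocks until
    // onResponse or onFailure fires, rethrowing failures as exceptions.
    PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
    reservedRealm.authenticate(token, future);
    AuthenticationResult result = future.actionGet();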
View File
@ -11,6 +11,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.watcher.ResourceWatcherService;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmConfig;
@ -50,20 +51,26 @@ public class FileRealmTests extends ESTestCase {
private FileUserPasswdStore userPasswdStore; private FileUserPasswdStore userPasswdStore;
private FileUserRolesStore userRolesStore; private FileUserRolesStore userRolesStore;
private Settings globalSettings; private Settings globalSettings;
private ThreadPool threadPool;
private ThreadContext threadContext;
@Before @Before
public void init() throws Exception { public void init() throws Exception {
userPasswdStore = mock(FileUserPasswdStore.class); userPasswdStore = mock(FileUserPasswdStore.class);
userRolesStore = mock(FileUserRolesStore.class); userRolesStore = mock(FileUserRolesStore.class);
globalSettings = Settings.builder().put("path.home", createTempDir()).build(); globalSettings = Settings.builder().put("path.home", createTempDir()).build();
threadPool = mock(ThreadPool.class);
threadContext = new ThreadContext(globalSettings);
when(threadPool.getThreadContext()).thenReturn(threadContext);
} }
public void testAuthenticate() throws Exception { public void testAuthenticate() throws Exception {
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class)))
.thenAnswer(VERIFY_PASSWORD_ANSWER); .thenAnswer(VERIFY_PASSWORD_ANSWER);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" }); when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); threadContext);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future);
final AuthenticationResult result = future.actionGet(); final AuthenticationResult result = future.actionGet();
@ -80,11 +87,12 @@ public class FileRealmTests extends ESTestCase {
Settings settings = Settings.builder() Settings settings = Settings.builder()
.put("cache.hash_algo", Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT)) .put("cache.hash_algo", Hasher.values()[randomIntBetween(0, Hasher.values().length - 1)].name().toLowerCase(Locale.ROOT))
.build(); .build();
RealmConfig config = new RealmConfig("file-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
threadContext);
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class)))
.thenAnswer(VERIFY_PASSWORD_ANSWER); .thenAnswer(VERIFY_PASSWORD_ANSWER);
when(userRolesStore.roles("user1")).thenReturn(new String[]{"role1", "role2"}); when(userRolesStore.roles("user1")).thenReturn(new String[]{"role1", "role2"});
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future);
User user1 = future.actionGet().getUser(); User user1 = future.actionGet().getUser();
@ -95,13 +103,14 @@ public class FileRealmTests extends ESTestCase {
} }
public void testAuthenticateCachingRefresh() throws Exception { public void testAuthenticateCachingRefresh() throws Exception {
RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
threadContext);
userPasswdStore = spy(new UserPasswdStore(config)); userPasswdStore = spy(new UserPasswdStore(config));
userRolesStore = spy(new UserRolesStore(config)); userRolesStore = spy(new UserRolesStore(config));
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class)))
.thenAnswer(VERIFY_PASSWORD_ANSWER); .thenAnswer(VERIFY_PASSWORD_ANSWER);
doReturn(new String[] { "role1", "role2" }).when(userRolesStore).roles("user1"); doReturn(new String[] { "role1", "role2" }).when(userRolesStore).roles("user1");
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future); realm.authenticate(new UsernamePasswordToken("user1", new SecureString("test123")), future);
User user1 = future.actionGet().getUser(); User user1 = future.actionGet().getUser();
@ -134,11 +143,12 @@ public class FileRealmTests extends ESTestCase {
} }
public void testToken() throws Exception { public void testToken() throws Exception {
RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
threadContext);
when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class))) when(userPasswdStore.verifyPassword(eq("user1"), eq(new SecureString("test123")), any(Supplier.class)))
.thenAnswer(VERIFY_PASSWORD_ANSWER); .thenAnswer(VERIFY_PASSWORD_ANSWER);
when(userRolesStore.roles("user1")).thenReturn(new String[]{"role1", "role2"}); when(userRolesStore.roles("user1")).thenReturn(new String[]{"role1", "role2"});
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
ThreadContext threadContext = new ThreadContext(Settings.EMPTY); ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
UsernamePasswordToken.putTokenHeader(threadContext, new UsernamePasswordToken("user1", new SecureString("test123"))); UsernamePasswordToken.putTokenHeader(threadContext, new UsernamePasswordToken("user1", new SecureString("test123")));
@ -153,8 +163,9 @@ public class FileRealmTests extends ESTestCase {
public void testLookup() throws Exception { public void testLookup() throws Exception {
when(userPasswdStore.userExists("user1")).thenReturn(true); when(userPasswdStore.userExists("user1")).thenReturn(true);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" }); when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); threadContext);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>(); PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("user1", future); realm.lookupUser("user1", future);
@ -170,8 +181,9 @@ public class FileRealmTests extends ESTestCase {
public void testLookupCaching() throws Exception { public void testLookupCaching() throws Exception {
when(userPasswdStore.userExists("user1")).thenReturn(true); when(userPasswdStore.userExists("user1")).thenReturn(true);
when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" }); when(userRolesStore.roles("user1")).thenReturn(new String[] { "role1", "role2" });
RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); threadContext);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>(); PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("user1", future); realm.lookupUser("user1", future);
@ -185,12 +197,13 @@ public class FileRealmTests extends ESTestCase {
} }
public void testLookupCachingWithRefresh() throws Exception { public void testLookupCachingWithRefresh() throws Exception {
RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
threadContext);
userPasswdStore = spy(new UserPasswdStore(config)); userPasswdStore = spy(new UserPasswdStore(config));
userRolesStore = spy(new UserRolesStore(config)); userRolesStore = spy(new UserRolesStore(config));
doReturn(true).when(userPasswdStore).userExists("user1"); doReturn(true).when(userPasswdStore).userExists("user1");
doReturn(new String[] { "role1", "role2" }).when(userRolesStore).roles("user1"); doReturn(new String[] { "role1", "role2" }).when(userRolesStore).roles("user1");
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>(); PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("user1", future); realm.lookupUser("user1", future);
User user1 = future.actionGet(); User user1 = future.actionGet();
@ -231,8 +244,9 @@ public class FileRealmTests extends ESTestCase {
int order = randomIntBetween(0, 10); int order = randomIntBetween(0, 10);
settings.put("order", order); settings.put("order", order);
RealmConfig config = new RealmConfig("file-realm", settings.build(), globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); RealmConfig config = new RealmConfig("file-realm", settings.build(), globalSettings, TestEnvironment.newEnvironment(globalSettings),
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore); threadContext);
FileRealm realm = new FileRealm(config, userPasswdStore, userRolesStore, threadPool);
Map<String, Object> usage = realm.usageStats(); Map<String, Object> usage = realm.usageStats();
assertThat(usage, is(notNullValue())); assertThat(usage, is(notNullValue()));
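The reflowed RealmConfig constructions above all share one shape; a helper capturing it, assuming the fields set up in init() (the method name is illustrative, not part of the change):

    private RealmConfig fileRealmConfig(String name, Settings realmSettings) {
        // globalSettings and threadContext are the fixtures from init(); the
        // same ThreadContext is what the mocked ThreadPool hands out.
        return new RealmConfig(name, realmSettings, globalSettings,
                TestEnvironment.newEnvironment(globalSettings), threadContext);
    }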
View File
@ -14,6 +14,8 @@ import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.env.TestEnvironment;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.test.SecuritySettingsSourceField;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.core.security.authc.AuthenticationResult; import org.elasticsearch.xpack.core.security.authc.AuthenticationResult;
import org.elasticsearch.xpack.core.security.authc.Realm; import org.elasticsearch.xpack.core.security.authc.Realm;
import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.RealmConfig;
@ -22,6 +24,7 @@ import org.elasticsearch.xpack.core.security.authc.support.CachingUsernamePasswo
import org.elasticsearch.xpack.core.security.authc.support.Hasher; import org.elasticsearch.xpack.core.security.authc.support.Hasher;
import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken;
import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.User;
import org.junit.After;
import org.junit.Before; import org.junit.Before;
import java.util.ArrayList; import java.util.ArrayList;
@ -42,10 +45,19 @@ import static org.hamcrest.Matchers.sameInstance;
public class CachingUsernamePasswordRealmTests extends ESTestCase { public class CachingUsernamePasswordRealmTests extends ESTestCase {
private Settings globalSettings; private Settings globalSettings;
private ThreadPool threadPool;
@Before @Before
public void setup() { public void setup() {
globalSettings = Settings.builder().put("path.home", createTempDir()).build(); globalSettings = Settings.builder().put("path.home", createTempDir()).build();
threadPool = new TestThreadPool("caching username password realm tests");
}
@After
public void stop() throws InterruptedException {
if (threadPool != null) {
terminate(threadPool);
}
} }
public void testSettings() throws Exception { public void testSettings() throws Exception {
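Unlike the classes above, this one needs real executors rather than a mock, so it owns a TestThreadPool per test; ESTestCase's leak detection fails tests that leave threads running, hence the paired terminate() call. The lifecycle as a standalone skeleton (class and method names illustrative):

    import org.elasticsearch.test.ESTestCase;
    import org.elasticsearch.threadpool.TestThreadPool;
    import org.elasticsearch.threadpool.ThreadPool;
    import org.junit.After;
    import org.junit.Before;

    public class SomeCachingRealmTests extends ESTestCase {
        private ThreadPool threadPool;

        @Before
        public void startPool() {
            threadPool = new TestThreadPool(getTestName());
        }

        @After
        public void stopPool() throws InterruptedException {
            if (threadPool != null) {
                terminate(threadPool); // ESTestCase helper: shuts down and awaits termination
            }
        }
    }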
@ -61,7 +73,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
RealmConfig config = new RealmConfig("test_realm", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), RealmConfig config = new RealmConfig("test_realm", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY)); new ThreadContext(Settings.EMPTY));
CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config) { CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config, threadPool) {
@Override @Override
protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) { protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
listener.onResponse(AuthenticationResult.success(new User("username", new String[]{"r1", "r2", "r3"}))); listener.onResponse(AuthenticationResult.success(new User("username", new String[]{"r1", "r2", "r3"})));
@ -77,7 +89,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
public void testAuthCache() { public void testAuthCache() {
AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings); AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings, threadPool);
SecureString pass = new SecureString("pass"); SecureString pass = new SecureString("pass");
PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("a", pass), future); realm.authenticate(new UsernamePasswordToken("a", pass), future);
@ -106,7 +118,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
public void testLookupCache() { public void testLookupCache() {
AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings); AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>(); PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("a", future); realm.lookupUser("a", future);
future.actionGet(); future.actionGet();
@ -133,7 +145,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
public void testLookupAndAuthCache() { public void testLookupAndAuthCache() {
AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings); AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings, threadPool);
// lookup first // lookup first
PlainActionFuture<User> lookupFuture = new PlainActionFuture<>(); PlainActionFuture<User> lookupFuture = new PlainActionFuture<>();
realm.lookupUser("a", lookupFuture); realm.lookupUser("a", lookupFuture);
@ -172,7 +184,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
public void testCacheChangePassword() { public void testCacheChangePassword() {
AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings); AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings, threadPool);
String user = "testUser"; String user = "testUser";
SecureString pass1 = new SecureString("pass"); SecureString pass1 = new SecureString("pass");
@ -198,7 +210,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
public void testCacheDisabledUser() { public void testCacheDisabledUser() {
AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings); AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(globalSettings, threadPool);
realm.setUsersEnabled(false); realm.setUsersEnabled(false);
String user = "testUser"; String user = "testUser";
@ -233,7 +245,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
.build(); .build();
RealmConfig config = new RealmConfig("test_cache_ttl", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), RealmConfig config = new RealmConfig("test_cache_ttl", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY)); new ThreadContext(Settings.EMPTY));
AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(config); AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(config, threadPool);
final UsernamePasswordToken authToken = new UsernamePasswordToken("the-user", new SecureString("the-password")); final UsernamePasswordToken authToken = new UsernamePasswordToken("the-user", new SecureString("the-password"));
@ -262,7 +274,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
.build(); .build();
RealmConfig config = new RealmConfig("test_cache_ttl", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), RealmConfig config = new RealmConfig("test_cache_ttl", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY)); new ThreadContext(Settings.EMPTY));
AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(config); AlwaysAuthenticateCachingRealm realm = new AlwaysAuthenticateCachingRealm(config, threadPool);
final UsernamePasswordToken authToken = new UsernamePasswordToken("the-user", new SecureString("the-password")); final UsernamePasswordToken authToken = new UsernamePasswordToken("the-user", new SecureString("the-password"));
PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
@ -304,13 +316,13 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
public void testAuthenticateContract() throws Exception { public void testAuthenticateContract() throws Exception {
Realm realm = new FailingAuthenticationRealm(Settings.EMPTY, globalSettings); Realm realm = new FailingAuthenticationRealm(Settings.EMPTY, globalSettings, threadPool);
PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>(); PlainActionFuture<AuthenticationResult> future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future); realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future);
User user = future.actionGet().getUser(); User user = future.actionGet().getUser();
assertThat(user, nullValue()); assertThat(user, nullValue());
realm = new ThrowingAuthenticationRealm(Settings.EMPTY, globalSettings); realm = new ThrowingAuthenticationRealm(Settings.EMPTY, globalSettings, threadPool);
future = new PlainActionFuture<>(); future = new PlainActionFuture<>();
realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future); realm.authenticate(new UsernamePasswordToken("user", new SecureString("pass")), future);
RuntimeException e = expectThrows(RuntimeException.class, future::actionGet); RuntimeException e = expectThrows(RuntimeException.class, future::actionGet);
@ -318,19 +330,85 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
public void testLookupContract() throws Exception { public void testLookupContract() throws Exception {
Realm realm = new FailingAuthenticationRealm(Settings.EMPTY, globalSettings); Realm realm = new FailingAuthenticationRealm(Settings.EMPTY, globalSettings, threadPool);
PlainActionFuture<User> future = new PlainActionFuture<>(); PlainActionFuture<User> future = new PlainActionFuture<>();
realm.lookupUser("user", future); realm.lookupUser("user", future);
User user = future.actionGet(); User user = future.actionGet();
assertThat(user, nullValue()); assertThat(user, nullValue());
realm = new ThrowingAuthenticationRealm(Settings.EMPTY, globalSettings); realm = new ThrowingAuthenticationRealm(Settings.EMPTY, globalSettings, threadPool);
future = new PlainActionFuture<>(); future = new PlainActionFuture<>();
realm.lookupUser("user", future); realm.lookupUser("user", future);
RuntimeException e = expectThrows(RuntimeException.class, future::actionGet); RuntimeException e = expectThrows(RuntimeException.class, future::actionGet);
assertThat(e.getMessage(), containsString("lookup exception")); assertThat(e.getMessage(), containsString("lookup exception"));
} }
public void testSingleAuthPerUserLimit() throws Exception {
final String username = "username";
final SecureString password = SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
final AtomicInteger authCounter = new AtomicInteger(0);
final String passwordHash = new String(Hasher.BCRYPT.hash(password));
RealmConfig config = new RealmConfig("test_realm", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY));
final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config, threadPool) {
@Override
protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
authCounter.incrementAndGet();
// do something slow
if (BCrypt.checkpw(token.credentials(), passwordHash)) {
listener.onResponse(AuthenticationResult.success(new User(username, new String[]{"r1", "r2", "r3"})));
} else {
listener.onFailure(new IllegalStateException("password auth should never fail"));
}
}
@Override
protected void doLookupUser(String username, ActionListener<User> listener) {
listener.onFailure(new UnsupportedOperationException("this method should not be called"));
}
};
final int numberOfProcessors = Runtime.getRuntime().availableProcessors();
final int numberOfThreads = scaledRandomIntBetween((numberOfProcessors + 1) / 2, numberOfProcessors * 3);
final int numberOfIterations = scaledRandomIntBetween(20, 100);
final CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
List<Thread> threads = new ArrayList<>(numberOfThreads);
for (int i = 0; i < numberOfThreads; i++) {
threads.add(new Thread(() -> {
try {
latch.countDown();
latch.await();
for (int i1 = 0; i1 < numberOfIterations; i1++) {
UsernamePasswordToken token = new UsernamePasswordToken(username, password);
realm.authenticate(token, ActionListener.wrap((result) -> {
if (result.isAuthenticated() == false) {
throw new IllegalStateException("proper password led to an unauthenticated result: " + result);
}
}, (e) -> {
logger.error("caught exception", e);
fail("unexpected exception - " + e);
}));
}
} catch (InterruptedException e) {
logger.error("thread was interrupted", e);
Thread.currentThread().interrupt();
}
}));
}
for (Thread thread : threads) {
thread.start();
}
latch.countDown();
for (Thread thread : threads) {
thread.join();
}
assertEquals(1, authCounter.get());
}
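testSingleAuthPerUserLimit pins down the behaviour that motivates the ThreadPool plumbing: concurrent authentications for the same user collapse into one backing call, so authCounter stays at 1. A sketch of that coalescing idea using a per-key in-flight future; this illustrates the mechanism only, not the realm's actual implementation:

    import java.util.concurrent.CompletableFuture;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.function.Function;

    // computeIfAbsent guarantees a single loader invocation per key, so every
    // concurrent caller for the same key shares one in-flight computation.
    final class CoalescingLoader<K, V> {
        private final ConcurrentMap<K, CompletableFuture<V>> inFlight = new ConcurrentHashMap<>();

        CompletableFuture<V> load(K key, Function<K, V> expensiveLoader) {
            return inFlight.computeIfAbsent(key,
                    k -> CompletableFuture.supplyAsync(() -> expensiveLoader.apply(k)));
        }
    }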
public void testCacheConcurrency() throws Exception { public void testCacheConcurrency() throws Exception {
final String username = "username"; final String username = "username";
final SecureString password = SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; final SecureString password = SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING;
@ -339,7 +417,7 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
final String passwordHash = new String(Hasher.BCRYPT.hash(password)); final String passwordHash = new String(Hasher.BCRYPT.hash(password));
RealmConfig config = new RealmConfig("test_realm", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), RealmConfig config = new RealmConfig("test_realm", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY)); new ThreadContext(Settings.EMPTY));
final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config) { final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config, threadPool) {
@Override @Override
protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) { protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
// do something slow // do something slow
@ -356,19 +434,18 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
}; };
final CountDownLatch latch = new CountDownLatch(1);
final int numberOfProcessors = Runtime.getRuntime().availableProcessors(); final int numberOfProcessors = Runtime.getRuntime().availableProcessors();
final int numberOfThreads = scaledRandomIntBetween((numberOfProcessors + 1) / 2, numberOfProcessors * 3); final int numberOfThreads = scaledRandomIntBetween((numberOfProcessors + 1) / 2, numberOfProcessors * 3);
final int numberOfIterations = scaledRandomIntBetween(20, 100); final int numberOfIterations = scaledRandomIntBetween(20, 100);
List<Thread> threads = new ArrayList<>(); final CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
List<Thread> threads = new ArrayList<>(numberOfThreads);
for (int i = 0; i < numberOfThreads; i++) { for (int i = 0; i < numberOfThreads; i++) {
final boolean invalidPassword = randomBoolean(); final boolean invalidPassword = randomBoolean();
threads.add(new Thread() { threads.add(new Thread(() -> {
@Override
public void run() {
try { try {
latch.countDown();
latch.await(); latch.await();
for (int i = 0; i < numberOfIterations; i++) { for (int i1 = 0; i1 < numberOfIterations; i1++) {
UsernamePasswordToken token = new UsernamePasswordToken(username, invalidPassword ? randomPassword : password); UsernamePasswordToken token = new UsernamePasswordToken(username, invalidPassword ? randomPassword : password);
realm.authenticate(token, ActionListener.wrap((result) -> { realm.authenticate(token, ActionListener.wrap((result) -> {
@ -384,9 +461,10 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
logger.error("thread was interrupted", e);
Thread.currentThread().interrupt();
} }
} }));
});
} }
for (Thread thread : threads) { for (Thread thread : threads) {
@ -400,10 +478,11 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
public void testUserLookupConcurrency() throws Exception { public void testUserLookupConcurrency() throws Exception {
final String username = "username"; final String username = "username";
final AtomicInteger lookupCounter = new AtomicInteger(0);
RealmConfig config = new RealmConfig("test_realm", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), RealmConfig config = new RealmConfig("test_realm", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY)); new ThreadContext(Settings.EMPTY));
final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config) { final CachingUsernamePasswordRealm realm = new CachingUsernamePasswordRealm("test", config, threadPool) {
@Override @Override
protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) { protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
listener.onFailure(new UnsupportedOperationException("authenticate should not be called!")); listener.onFailure(new UnsupportedOperationException("authenticate should not be called!"));
@ -411,22 +490,22 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
@Override @Override
protected void doLookupUser(String username, ActionListener<User> listener) { protected void doLookupUser(String username, ActionListener<User> listener) {
lookupCounter.incrementAndGet();
listener.onResponse(new User(username, new String[]{"r1", "r2", "r3"})); listener.onResponse(new User(username, new String[]{"r1", "r2", "r3"}));
} }
}; };
final CountDownLatch latch = new CountDownLatch(1);
final int numberOfProcessors = Runtime.getRuntime().availableProcessors(); final int numberOfProcessors = Runtime.getRuntime().availableProcessors();
final int numberOfThreads = scaledRandomIntBetween(numberOfProcessors, numberOfProcessors * 3); final int numberOfThreads = scaledRandomIntBetween(numberOfProcessors, numberOfProcessors * 3);
final int numberOfIterations = scaledRandomIntBetween(10000, 100000); final int numberOfIterations = scaledRandomIntBetween(10000, 100000);
List<Thread> threads = new ArrayList<>(); final CountDownLatch latch = new CountDownLatch(1 + numberOfThreads);
List<Thread> threads = new ArrayList<>(numberOfThreads);
for (int i = 0; i < numberOfThreads; i++) { for (int i = 0; i < numberOfThreads; i++) {
threads.add(new Thread() { threads.add(new Thread(() -> {
@Override
public void run() {
try { try {
latch.countDown();
latch.await(); latch.await();
for (int i = 0; i < numberOfIterations; i++) { for (int i1 = 0; i1 < numberOfIterations; i1++) {
realm.lookupUser(username, ActionListener.wrap((user) -> { realm.lookupUser(username, ActionListener.wrap((user) -> {
if (user == null) { if (user == null) {
throw new RuntimeException("failed to lookup user"); throw new RuntimeException("failed to lookup user");
@ -438,9 +517,10 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
} }
} catch (InterruptedException e) { } catch (InterruptedException e) {
logger.error("thread was interrupted", e);
Thread.currentThread().interrupt();
} }
} }));
});
} }
for (Thread thread : threads) { for (Thread thread : threads) {
@ -450,13 +530,14 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
for (Thread thread : threads) { for (Thread thread : threads) {
thread.join(); thread.join();
} }
assertEquals(1, lookupCounter.get());
} }
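The latch rework in these concurrency tests (CountDownLatch(1) becomes CountDownLatch(1 + numberOfThreads), with each worker counting down before awaiting) is the start-gate pattern: no iteration begins until every thread has actually started. Reduced to its core:

    import java.util.concurrent.CountDownLatch;

    // All workers announce readiness, then everyone (workers plus the
    // starter) must count down before any worker proceeds.
    static void raceAllAtOnce(int numberOfThreads, Runnable contendedWork) throws InterruptedException {
        CountDownLatch startGate = new CountDownLatch(1 + numberOfThreads);
        Thread[] workers = new Thread[numberOfThreads];
        for (int i = 0; i < numberOfThreads; i++) {
            workers[i] = new Thread(() -> {
                try {
                    startGate.countDown(); // this worker is ready
                    startGate.await();     // wait for the rest and for the starter
                    contendedWork.run();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                }
            });
            workers[i].start();
        }
        startGate.countDown(); // final count: all workers begin together
        for (Thread worker : workers) {
            worker.join();
        }
    }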
static class FailingAuthenticationRealm extends CachingUsernamePasswordRealm { static class FailingAuthenticationRealm extends CachingUsernamePasswordRealm {
FailingAuthenticationRealm(Settings settings, Settings global) { FailingAuthenticationRealm(Settings settings, Settings global, ThreadPool threadPool) {
super("failing", new RealmConfig("failing-test", settings, global, TestEnvironment.newEnvironment(global), super("failing", new RealmConfig("failing-test", settings, global, TestEnvironment.newEnvironment(global),
new ThreadContext(Settings.EMPTY))); threadPool.getThreadContext()), threadPool);
} }
@Override @Override
@ -472,9 +553,9 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
static class ThrowingAuthenticationRealm extends CachingUsernamePasswordRealm { static class ThrowingAuthenticationRealm extends CachingUsernamePasswordRealm {
ThrowingAuthenticationRealm(Settings settings, Settings globalSettings) { ThrowingAuthenticationRealm(Settings settings, Settings globalSettings, ThreadPool threadPool) {
super("throwing", new RealmConfig("throwing-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), super("throwing", new RealmConfig("throwing-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY))); threadPool.getThreadContext()), threadPool);
} }
@Override @Override
@ -495,13 +576,13 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
private boolean usersEnabled = true; private boolean usersEnabled = true;
AlwaysAuthenticateCachingRealm(Settings globalSettings) { AlwaysAuthenticateCachingRealm(Settings globalSettings, ThreadPool threadPool) {
this(new RealmConfig("always-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), this(new RealmConfig("always-test", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings),
new ThreadContext(Settings.EMPTY))); threadPool.getThreadContext()), threadPool);
} }
AlwaysAuthenticateCachingRealm(RealmConfig config) { AlwaysAuthenticateCachingRealm(RealmConfig config, ThreadPool threadPool) {
super("always", config); super("always", config, threadPool);
} }
void setUsersEnabled(boolean usersEnabled) { void setUsersEnabled(boolean usersEnabled) {
@ -527,9 +608,9 @@ public class CachingUsernamePasswordRealmTests extends ESTestCase {
public final AtomicInteger authInvocationCounter = new AtomicInteger(0); public final AtomicInteger authInvocationCounter = new AtomicInteger(0);
public final AtomicInteger lookupInvocationCounter = new AtomicInteger(0); public final AtomicInteger lookupInvocationCounter = new AtomicInteger(0);
LookupNotSupportedRealm(Settings globalSettings) { LookupNotSupportedRealm(Settings globalSettings, ThreadPool threadPool) {
super("lookup", new RealmConfig("lookup-notsupported-test", Settings.EMPTY, globalSettings, super("lookup", new RealmConfig("lookup-notsupported-test", Settings.EMPTY, globalSettings,
TestEnvironment.newEnvironment(globalSettings), new ThreadContext(Settings.EMPTY))); TestEnvironment.newEnvironment(globalSettings), threadPool.getThreadContext()), threadPool);
} }
@Override @Override
View File
@ -198,7 +198,7 @@ public class NativeRoleMappingStoreTests extends ESTestCase {
final Environment env = TestEnvironment.newEnvironment(settings); final Environment env = TestEnvironment.newEnvironment(settings);
final RealmConfig realmConfig = new RealmConfig(getTestName(), Settings.EMPTY, settings, env, threadContext); final RealmConfig realmConfig = new RealmConfig(getTestName(), Settings.EMPTY, settings, env, threadContext);
final CachingUsernamePasswordRealm mockRealm = new CachingUsernamePasswordRealm("test", realmConfig) { final CachingUsernamePasswordRealm mockRealm = new CachingUsernamePasswordRealm("test", realmConfig, threadPool) {
@Override @Override
protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) { protected void doAuthenticate(UsernamePasswordToken token, ActionListener<AuthenticationResult> listener) {
listener.onResponse(AuthenticationResult.notHandled()); listener.onResponse(AuthenticationResult.notHandled());
View File
@ -39,10 +39,12 @@ import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.search.internal.ShardSearchTransportRequest;
import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.ESTestCase;
@ -149,7 +151,10 @@ public class IndicesAndAliasesResolverTests extends ESTestCase {
new IndicesPrivileges[] { IndicesPrivileges.builder().indices(authorizedIndices).privileges("all").build() }, null)); new IndicesPrivileges[] { IndicesPrivileges.builder().indices(authorizedIndices).privileges("all").build() }, null));
roleMap.put("dash", new RoleDescriptor("dash", null, roleMap.put("dash", new RoleDescriptor("dash", null,
new IndicesPrivileges[] { IndicesPrivileges.builder().indices(dashIndices).privileges("all").build() }, null)); new IndicesPrivileges[] { IndicesPrivileges.builder().indices(dashIndices).privileges("all").build() }, null));
roleMap.put("test", new RoleDescriptor("role", new String[] { "monitor" }, null, null)); roleMap.put("test", new RoleDescriptor("test", new String[] { "monitor" }, null, null));
roleMap.put("alias_read_write", new RoleDescriptor("alias_read_write", null,
new IndicesPrivileges[] { IndicesPrivileges.builder().indices("barbaz", "foofoobar").privileges("read", "write").build() },
null));
roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR); roleMap.put(ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR.getName(), ReservedRolesStore.SUPERUSER_ROLE_DESCRIPTOR);
final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); final FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY);
doAnswer((i) -> { doAnswer((i) -> {
@ -1317,6 +1322,21 @@ public class IndicesAndAliasesResolverTests extends ESTestCase {
assertThat(request.aliases(), arrayContainingInAnyOrder("<datetime-{now/M}>")); assertThat(request.aliases(), arrayContainingInAnyOrder("<datetime-{now/M}>"));
} }
public void testDynamicPutMappingRequestFromAlias() {
    PutMappingRequest request = new PutMappingRequest(Strings.EMPTY_ARRAY).setConcreteIndex(new Index("foofoo", UUIDs.base64UUID()));
    User user = new User("alias-writer", "alias_read_write");
    AuthorizedIndices authorizedIndices = buildAuthorizedIndices(user, PutMappingAction.NAME);

    String putMappingIndexOrAlias = IndicesAndAliasesResolver.getPutMappingIndexOrAlias(request, authorizedIndices, metaData);
    assertEquals("barbaz", putMappingIndexOrAlias);

    // multiple indices map to an alias so we can only return the concrete index
    final String index = randomFrom("foo", "foobar");
    request = new PutMappingRequest(Strings.EMPTY_ARRAY).setConcreteIndex(new Index(index, UUIDs.base64UUID()));
    putMappingIndexOrAlias = IndicesAndAliasesResolver.getPutMappingIndexOrAlias(request, authorizedIndices, metaData);
    assertEquals(index, putMappingIndexOrAlias);
}
// TODO with the removal of DeleteByQuery is there another way to test resolving a write action?
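
The new test pins down the resolution rule for dynamic put-mapping requests that carry a concrete index: prefer an authorized alias when it unambiguously identifies that index, otherwise fall back to the index name. A minimal sketch of that rule, assuming a simple alias-to-indices map — the class, method, and map here are illustrative assumptions, not the actual IndicesAndAliasesResolver code:

import java.util.List;
import java.util.Map;

final class PutMappingAliasResolutionSketch {
    // Returns an authorized alias only when it points to exactly the target
    // index; when several indices sit behind the alias, only the concrete
    // index is a safe answer (mirrors the assertions in the test above).
    static String resolve(String concreteIndex, List<String> authorizedNames,
                          Map<String, List<String>> aliasToIndices) {
        for (String name : authorizedNames) {
            List<String> indices = aliasToIndices.get(name);
            if (indices != null && indices.size() == 1 && indices.get(0).equals(concreteIndex)) {
                return name;
            }
        }
        return concreteIndex;
    }
}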

View File

@@ -41,7 +41,7 @@ public class SSLChannelContextTests extends ESTestCase {
    private SSLChannelContext context;
    private InboundChannelBuffer channelBuffer;
    private SocketSelector selector;
-    private BiConsumer<Void, Throwable> listener;
+    private BiConsumer<Void, Exception> listener;
    private Consumer exceptionHandler;
    private SSLDriver sslDriver;
    private ByteBuffer readBuffer = ByteBuffer.allocate(1 << 14);
@@ -266,7 +266,7 @@ public class SSLChannelContextTests extends ESTestCase {
    @SuppressWarnings("unchecked")
    public void testMultipleWritesPartialFlushes() throws IOException {
-        BiConsumer<Void, Throwable> listener2 = mock(BiConsumer.class);
+        BiConsumer<Void, Exception> listener2 = mock(BiConsumer.class);
        ByteBuffer[] buffers1 = {ByteBuffer.allocate(10)};
        ByteBuffer[] buffers2 = {ByteBuffer.allocate(5)};
        FlushReadyWrite flushOperation1 = mock(FlushReadyWrite.class);

View File

@@ -0,0 +1,90 @@
---
setup:
- skip:
features: headers
- do:
cluster.health:
wait_for_status: yellow
- do:
xpack.security.put_role:
name: "alias_write_role"
body: >
{
"indices": [
{ "names": ["write_alias"], "privileges": ["write"] }
]
}
- do:
xpack.security.put_user:
username: "test_user"
body: >
{
"password" : "x-pack-test-password",
"roles" : [ "alias_write_role" ],
"full_name" : "user with privileges to write via alias"
}
- do:
indices.create:
index: write_index_1
body:
settings:
index:
number_of_shards: 1
number_of_replicas: 0
- do:
indices.put_alias:
index: write_index_1
name: write_alias
---
teardown:
- do:
xpack.security.delete_user:
username: "test_user"
ignore: 404
- do:
xpack.security.delete_role:
name: "alias_write_role"
ignore: 404
- do:
indices.delete_alias:
index: "write_index_1"
name: [ "write_alias" ]
ignore: 404
- do:
indices.delete:
index: [ "write_index_1" ]
ignore: 404
---
"Test indexing documents into an alias with dynamic mappings":
- do:
headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
create:
id: 1
index: write_alias
type: doc
body: >
{
"name" : "doc1"
}
- do:
headers: { Authorization: "Basic dGVzdF91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" } # test_user
create:
id: 2
index: write_alias
type: doc
body: >
{
"name2" : "doc2"
}

View File

@@ -196,6 +196,8 @@ import static java.util.Collections.emptyList;
public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin {
    // This setting is only here for backward compatibility reasons as 6.x indices made use of it. It can be removed in 8.x.
    @Deprecated
    public static final Setting<String> INDEX_WATCHER_TEMPLATE_VERSION_SETTING =
            new Setting<>("index.xpack.watcher.template.version", "", Function.identity(), Setting.Property.IndexScope);
    public static final Setting<Boolean> ENCRYPT_SENSITIVE_DATA_SETTING =