Merge branch 'master' into index-lifecycle

Tal Levy 2018-05-29 12:29:53 -07:00
commit bdf70e4f2f
334 changed files with 6127 additions and 1936 deletions

View File

@ -1,5 +1,5 @@
elasticsearch = 7.0.0-alpha1
lucene = 7.4.0-snapshot-cc2ee23050
lucene = 7.4.0-snapshot-1cbadda4d3
# optional dependencies
spatial4j = 0.7

View File

@ -21,8 +21,6 @@ package org.elasticsearch.client;
import org.apache.http.Header;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.ingest.PutPipelineRequest;
@ -68,28 +66,6 @@ public final class ClusterClient {
ClusterUpdateSettingsResponse::fromXContent, listener, emptySet(), headers);
}
/**
* Get current tasks using the Task Management API
* <p>
* See
* <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html"> Task Management API on elastic.co</a>
*/
public ListTasksResponse listTasks(ListTasksRequest request, Header... headers) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
emptySet(), headers);
}
/**
* Asynchronously get current tasks using the Task Management API
* <p>
* See
* <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html"> Task Management API on elastic.co</a>
*/
public void listTasksAsync(ListTasksRequest request, ActionListener<ListTasksResponse> listener, Header... headers) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
listener, emptySet(), headers);
}
/**
* Add a pipeline or update an existing pipeline in the cluster
* <p>

View File

@ -192,6 +192,7 @@ public class RestHighLevelClient implements Closeable {
private final IndicesClient indicesClient = new IndicesClient(this);
private final ClusterClient clusterClient = new ClusterClient(this);
private final SnapshotClient snapshotClient = new SnapshotClient(this);
private final TasksClient tasksClient = new TasksClient(this);
/**
* Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the
@ -264,6 +265,15 @@ public class RestHighLevelClient implements Closeable {
return snapshotClient;
}
/**
* Provides a {@link TasksClient} which can be used to access the Tasks API.
*
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html">Task Management API on elastic.co</a>
*/
public final TasksClient tasks() {
return tasksClient;
}
/**
* Executes a bulk request using the Bulk API
*

View File

@ -0,0 +1,64 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.apache.http.Header;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import java.io.IOException;
import static java.util.Collections.emptySet;
/**
* A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Tasks API.
* <p>
* See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html">Task Management API on elastic.co</a>
*/
public class TasksClient {
private final RestHighLevelClient restHighLevelClient;
TasksClient(RestHighLevelClient restHighLevelClient) {
this.restHighLevelClient = restHighLevelClient;
}
/**
* Get current tasks using the Task Management API
* <p>
* See
* <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html"> Task Management API on elastic.co</a>
*/
public ListTasksResponse list(ListTasksRequest request, Header... headers) throws IOException {
return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
emptySet(), headers);
}
/**
* Asynchronously get current tasks using the Task Management API
* <p>
* See
* <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/tasks.html"> Task Management API on elastic.co</a>
*/
public void listAsync(ListTasksRequest request, ActionListener<ListTasksResponse> listener, Header... headers) {
restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::listTasks, ListTasksResponse::fromXContent,
listener, emptySet(), headers);
}
}
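For orientation, a minimal end-to-end sketch of the new client (illustrative only: the host, port, and class/variable names are assumptions, not part of this commit):

import org.apache.http.HttpHost;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.tasks.TaskInfo;

public class TasksClientExample {
    public static void main(String[] args) throws Exception {
        // Assumed local node; adjust the host and port for your cluster.
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            ListTasksRequest request = new ListTasksRequest();
            request.setDetailed(true); // include descriptions; potentially slower to generate
            ListTasksResponse response = client.tasks().list(request);
            for (TaskInfo task : response.getTasks()) {
                System.out.println(task.getTaskId() + " -> " + task.getAction());
            }
        }
    }
}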

View File

@ -20,9 +20,6 @@
package org.elasticsearch.client;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.ingest.PutPipelineRequest;
@ -37,16 +34,13 @@ import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.ingest.Pipeline;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.tasks.TaskInfo;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static java.util.Collections.emptyList;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@ -117,31 +111,6 @@ public class ClusterClientIT extends ESRestHighLevelClientTestCase {
"Elasticsearch exception [type=illegal_argument_exception, reason=transient setting [" + setting + "], not recognized]"));
}
public void testListTasks() throws IOException {
ListTasksRequest request = new ListTasksRequest();
ListTasksResponse response = execute(request, highLevelClient().cluster()::listTasks, highLevelClient().cluster()::listTasksAsync);
assertThat(response, notNullValue());
assertThat(response.getNodeFailures(), equalTo(emptyList()));
assertThat(response.getTaskFailures(), equalTo(emptyList()));
// It's possible that there are other tasks running besides 'cluster:monitor/tasks/lists[n]' and 'cluster:monitor/tasks/lists'
assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
boolean listTasksFound = false;
for (TaskGroup taskGroup : response.getTaskGroups()) {
TaskInfo parent = taskGroup.getTaskInfo();
if ("cluster:monitor/tasks/lists".equals(parent.getAction())) {
assertThat(taskGroup.getChildTasks().size(), equalTo(1));
TaskGroup childGroup = taskGroup.getChildTasks().iterator().next();
assertThat(childGroup.getChildTasks().isEmpty(), equalTo(true));
TaskInfo child = childGroup.getTaskInfo();
assertThat(child.getAction(), equalTo("cluster:monitor/tasks/lists[n]"));
assertThat(child.getParentTaskId(), equalTo(parent.getTaskId()));
listTasksFound = true;
}
}
assertTrue("List tasks were not found", listTasksFound);
}
public void testPutPipeline() throws IOException {
String id = "some_pipeline_id";
XContentType xContentType = randomFrom(XContentType.values());

View File

@ -0,0 +1,61 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.tasks.TaskInfo;
import java.io.IOException;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
public class TasksIT extends ESRestHighLevelClientTestCase {
public void testListTasks() throws IOException {
ListTasksRequest request = new ListTasksRequest();
ListTasksResponse response = execute(request, highLevelClient().tasks()::list, highLevelClient().tasks()::listAsync);
assertThat(response, notNullValue());
assertThat(response.getNodeFailures(), equalTo(emptyList()));
assertThat(response.getTaskFailures(), equalTo(emptyList()));
// It's possible that there are other tasks running besides 'cluster:monitor/tasks/lists[n]' and 'cluster:monitor/tasks/lists'
assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
boolean listTasksFound = false;
for (TaskGroup taskGroup : response.getTaskGroups()) {
TaskInfo parent = taskGroup.getTaskInfo();
if ("cluster:monitor/tasks/lists".equals(parent.getAction())) {
assertThat(taskGroup.getChildTasks().size(), equalTo(1));
TaskGroup childGroup = taskGroup.getChildTasks().iterator().next();
assertThat(childGroup.getChildTasks().isEmpty(), equalTo(true));
TaskInfo child = childGroup.getTaskInfo();
assertThat(child.getAction(), equalTo("cluster:monitor/tasks/lists[n]"));
assertThat(child.getParentTaskId(), equalTo(parent.getTaskId()));
listTasksFound = true;
}
}
assertTrue("List tasks were not found", listTasksFound);
}
}

View File

@ -19,13 +19,8 @@
package org.elasticsearch.client.documentation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsResponse;
import org.elasticsearch.action.ingest.PutPipelineRequest;
@ -39,21 +34,15 @@ import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.indices.recovery.RecoverySettings;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
/**
* This class is used to generate the Java Cluster API documentation.
@ -193,89 +182,6 @@ public class ClusterClientDocumentationIT extends ESRestHighLevelClientTestCase
}
}
public void testListTasks() throws IOException {
RestHighLevelClient client = highLevelClient();
{
// tag::list-tasks-request
ListTasksRequest request = new ListTasksRequest();
// end::list-tasks-request
// tag::list-tasks-request-filter
request.setActions("cluster:*"); // <1>
request.setNodes("nodeId1", "nodeId2"); // <2>
request.setParentTaskId(new TaskId("parentTaskId", 42)); // <3>
// end::list-tasks-request-filter
// tag::list-tasks-request-detailed
request.setDetailed(true); // <1>
// end::list-tasks-request-detailed
// tag::list-tasks-request-wait-completion
request.setWaitForCompletion(true); // <1>
request.setTimeout(TimeValue.timeValueSeconds(50)); // <2>
request.setTimeout("50s"); // <3>
// end::list-tasks-request-wait-completion
}
ListTasksRequest request = new ListTasksRequest();
// tag::list-tasks-execute
ListTasksResponse response = client.cluster().listTasks(request);
// end::list-tasks-execute
assertThat(response, notNullValue());
// tag::list-tasks-response-tasks
List<TaskInfo> tasks = response.getTasks(); // <1>
// end::list-tasks-response-tasks
// tag::list-tasks-response-calc
Map<String, List<TaskInfo>> perNodeTasks = response.getPerNodeTasks(); // <1>
List<TaskGroup> groups = response.getTaskGroups(); // <2>
// end::list-tasks-response-calc
// tag::list-tasks-response-failures
List<ElasticsearchException> nodeFailures = response.getNodeFailures(); // <1>
List<TaskOperationFailure> taskFailures = response.getTaskFailures(); // <2>
// end::list-tasks-response-failures
assertThat(response.getNodeFailures(), equalTo(emptyList()));
assertThat(response.getTaskFailures(), equalTo(emptyList()));
assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
}
public void testListTasksAsync() throws Exception {
RestHighLevelClient client = highLevelClient();
{
ListTasksRequest request = new ListTasksRequest();
// tag::list-tasks-execute-listener
ActionListener<ListTasksResponse> listener =
new ActionListener<ListTasksResponse>() {
@Override
public void onResponse(ListTasksResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::list-tasks-execute-listener
// Replace the empty listener with a blocking listener in the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::list-tasks-execute-async
client.cluster().listTasksAsync(request, listener); // <1>
// end::list-tasks-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
public void testPutPipeline() throws IOException {
RestHighLevelClient client = highLevelClient();

View File

@ -45,7 +45,7 @@ import java.util.concurrent.TimeUnit;
import static org.hamcrest.Matchers.equalTo;
/**
* This class is used to generate the Java Cluster API documentation.
* This class is used to generate the Java Snapshot API documentation.
* You need to wrap your code between two tags like:
* // tag::example
* // end::example

View File

@ -0,0 +1,148 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client.documentation;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.TaskOperationFailure;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;
import org.elasticsearch.action.admin.cluster.node.tasks.list.TaskGroup;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import static java.util.Collections.emptyList;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
/**
* This class is used to generate the Java Tasks API documentation.
* You need to wrap your code between two tags like:
* // tag::example
* // end::example
*
* Where example is your tag name.
*
* Then in the documentation, you can extract what is between the tag and end tags with
* ["source","java",subs="attributes,callouts,macros"]
* --------------------------------------------------
* include-tagged::{doc-tests}/{@link TasksClientDocumentationIT}.java[example]
* --------------------------------------------------
*
* The column width of the code block is 84. If the code contains a line longer
* than 84, the line will be cut and a horizontal scroll bar will be displayed.
* (the code indentation of the tag is not included in the width)
*/
public class TasksClientDocumentationIT extends ESRestHighLevelClientTestCase {
public void testListTasks() throws IOException {
RestHighLevelClient client = highLevelClient();
{
// tag::list-tasks-request
ListTasksRequest request = new ListTasksRequest();
// end::list-tasks-request
// tag::list-tasks-request-filter
request.setActions("cluster:*"); // <1>
request.setNodes("nodeId1", "nodeId2"); // <2>
request.setParentTaskId(new TaskId("parentTaskId", 42)); // <3>
// end::list-tasks-request-filter
// tag::list-tasks-request-detailed
request.setDetailed(true); // <1>
// end::list-tasks-request-detailed
// tag::list-tasks-request-wait-completion
request.setWaitForCompletion(true); // <1>
request.setTimeout(TimeValue.timeValueSeconds(50)); // <2>
request.setTimeout("50s"); // <3>
// end::list-tasks-request-wait-completion
}
ListTasksRequest request = new ListTasksRequest();
// tag::list-tasks-execute
ListTasksResponse response = client.tasks().list(request);
// end::list-tasks-execute
assertThat(response, notNullValue());
// tag::list-tasks-response-tasks
List<TaskInfo> tasks = response.getTasks(); // <1>
// end::list-tasks-response-tasks
// tag::list-tasks-response-calc
Map<String, List<TaskInfo>> perNodeTasks = response.getPerNodeTasks(); // <1>
List<TaskGroup> groups = response.getTaskGroups(); // <2>
// end::list-tasks-response-calc
// tag::list-tasks-response-failures
List<ElasticsearchException> nodeFailures = response.getNodeFailures(); // <1>
List<TaskOperationFailure> taskFailures = response.getTaskFailures(); // <2>
// end::list-tasks-response-failures
assertThat(response.getNodeFailures(), equalTo(emptyList()));
assertThat(response.getTaskFailures(), equalTo(emptyList()));
assertThat(response.getTasks().size(), greaterThanOrEqualTo(2));
}
public void testListTasksAsync() throws Exception {
RestHighLevelClient client = highLevelClient();
{
ListTasksRequest request = new ListTasksRequest();
// tag::list-tasks-execute-listener
ActionListener<ListTasksResponse> listener =
new ActionListener<ListTasksResponse>() {
@Override
public void onResponse(ListTasksResponse response) {
// <1>
}
@Override
public void onFailure(Exception e) {
// <2>
}
};
// end::list-tasks-execute-listener
// Replace the empty listener with a blocking listener in the test
final CountDownLatch latch = new CountDownLatch(1);
listener = new LatchedActionListener<>(listener, latch);
// tag::list-tasks-execute-async
client.tasks().listAsync(request, listener); // <1>
// end::list-tasks-execute-async
assertTrue(latch.await(30L, TimeUnit.SECONDS));
}
}
}

View File

@ -104,11 +104,9 @@ include::indices/put_template.asciidoc[]
The Java High Level REST Client supports the following Cluster APIs:
* <<java-rest-high-cluster-put-settings>>
* <<java-rest-high-cluster-list-tasks>>
* <<java-rest-high-cluster-put-pipeline>>
include::cluster/put_settings.asciidoc[]
include::cluster/list_tasks.asciidoc[]
include::cluster/put_pipeline.asciidoc[]
== Snapshot APIs
@ -122,3 +120,11 @@ The Java High Level REST Client supports the following Snapshot APIs:
include::snapshot/get_repository.asciidoc[]
include::snapshot/create_repository.asciidoc[]
include::snapshot/delete_repository.asciidoc[]
== Tasks APIs
The Java High Level REST Client supports the following Tasks APIs:
* <<java-rest-high-tasks-list>>
include::tasks/list_tasks.asciidoc[]

View File

@ -1,4 +1,4 @@
[[java-rest-high-cluster-list-tasks]]
[[java-rest-high-tasks-list]]
=== List Tasks API
The List Tasks API allows you to get information about the tasks currently executing in the cluster.
@ -10,7 +10,7 @@ A `ListTasksRequest`:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-request]
--------------------------------------------------
There are no required parameters. By default the client will list all tasks and will not wait
for task completion.
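For a quick orientation before the tagged snippets below, a bare-bones synchronous call looks like this (an illustrative sketch assuming `client` is a `RestHighLevelClient`, not one of the tagged doc-test examples):

["source","java"]
--------------------------------------------------
ListTasksRequest request = new ListTasksRequest(); // defaults: all tasks, no waiting
ListTasksResponse response = client.tasks().list(request);
--------------------------------------------------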
@ -19,7 +19,7 @@ for task completion.
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-filter]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-request-filter]
--------------------------------------------------
<1> Request only cluster-related tasks
<2> Request all tasks running on nodes nodeId1 and nodeId2
@ -27,13 +27,13 @@ include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-detailed]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-request-detailed]
--------------------------------------------------
<1> Should the information include detailed, potentially slow-to-generate data. Defaults to `false`
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-request-wait-completion]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-request-wait-completion]
--------------------------------------------------
<1> Should this request wait for all found tasks to complete. Defaults to `false`
<2> Timeout for the request as a `TimeValue`. Applicable only if `setWaitForCompletion` is `true`.
@ -45,7 +45,7 @@ Defaults to 30 seconds
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-execute]
--------------------------------------------------
[[java-rest-high-cluster-list-tasks-async]]
@ -57,7 +57,7 @@ passed to the asynchronous method:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-async]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-execute-async]
--------------------------------------------------
<1> The `ListTasksRequest` to execute and the `ActionListener` to use
when the execution completes
@ -71,7 +71,7 @@ A typical listener for `ListTasksResponse` looks like:
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-execute-listener]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
@ -82,20 +82,20 @@ provided as an argument
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-tasks]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-response-tasks]
--------------------------------------------------
<1> List of currently running tasks
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-calc]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-response-calc]
--------------------------------------------------
<1> List of tasks grouped by a node
<2> List of tasks grouped by a parent task
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/ClusterClientDocumentationIT.java[list-tasks-response-failures]
include-tagged::{doc-tests}/TasksClientDocumentationIT.java[list-tasks-response-failures]
--------------------------------------------------
<1> List of node failures
<2> List of task failures
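A defensive pattern (illustrative, reusing the `response` from the snippets above) is to check both failure lists before trusting the task list:

["source","java"]
--------------------------------------------------
if (response.getNodeFailures().isEmpty() == false
        || response.getTaskFailures().isEmpty() == false) {
    // at least one node or task did not report cleanly; results may be partial
}
--------------------------------------------------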

View File

@ -26,6 +26,7 @@ include::{xes-repo-dir}/settings/configuring-xes.asciidoc[]
include::{xes-repo-dir}/setup/bootstrap-checks-xes.asciidoc[]
:edit_url:
include::upgrade.asciidoc[]
include::migration/index.asciidoc[]
@ -66,6 +67,7 @@ include::{xes-repo-dir}/rest-api/index.asciidoc[]
include::{xes-repo-dir}/commands/index.asciidoc[]
:edit_url:
include::how-to.asciidoc[]
include::testing.asciidoc[]

View File

@ -193,6 +193,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
tokenizers.put("pattern", PatternTokenizerFactory::new);
tokenizers.put("uax_url_email", UAX29URLEmailTokenizerFactory::new);
tokenizers.put("whitespace", WhitespaceTokenizerFactory::new);
tokenizers.put("keyword", KeywordTokenizerFactory::new);
return tokenizers;
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.core.KeywordTokenizer;
@ -30,7 +30,7 @@ public class KeywordTokenizerFactory extends AbstractTokenizerFactory {
private final int bufferSize;
public KeywordTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
KeywordTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
bufferSize = settings.getAsInt("buffer_size", 256);
}

View File

@ -24,7 +24,6 @@ import org.apache.lucene.analysis.en.PorterStemFilterFactory;
import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilterFactory;
import org.apache.lucene.analysis.reverse.ReverseStringFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.elasticsearch.index.analysis.KeywordTokenizerFactory;
import org.elasticsearch.index.analysis.SoraniNormalizationFilterFactory;
import org.elasticsearch.index.analysis.SynonymTokenFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase;
@ -56,6 +55,7 @@ public class CommonAnalysisFactoryTests extends AnalysisFactoryTestCase {
tokenizers.put("pattern", PatternTokenizerFactory.class);
tokenizers.put("uax29urlemail", UAX29URLEmailTokenizerFactory.class);
tokenizers.put("whitespace", WhitespaceTokenizerFactory.class);
tokenizers.put("keyword", KeywordTokenizerFactory.class);
return tokenizers;
}

View File

@ -5,9 +5,22 @@
indices.analyze:
body:
text: Foo Bar!
explain: true
tokenizer: keyword
- length: { tokens: 1 }
- match: { tokens.0.token: Foo Bar! }
- length: { detail.tokenizer.tokens: 1 }
- match: { detail.tokenizer.name: keyword }
- match: { detail.tokenizer.tokens.0.token: Foo Bar! }
- do:
indices.analyze:
body:
text: Foo Bar!
explain: true
tokenizer:
type: keyword
- length: { detail.tokenizer.tokens: 1 }
- match: { detail.tokenizer.name: _anonymous_tokenizer }
- match: { detail.tokenizer.tokens.0.token: Foo Bar! }
---
"nGram":

View File

@ -97,3 +97,19 @@
- length: { tokens: 2 }
- match: { tokens.0.token: sha }
- match: { tokens.1.token: hay }
---
"Custom normalizer in request":
- do:
indices.analyze:
body:
text: ABc
explain: true
filter: ["lowercase"]
- length: { detail.tokenizer.tokens: 1 }
- length: { detail.tokenfilters.0.tokens: 1 }
- match: { detail.tokenizer.name: keyword_for_normalizer }
- match: { detail.tokenizer.tokens.0.token: ABc }
- match: { detail.tokenfilters.0.name: lowercase }
- match: { detail.tokenfilters.0.tokens.0.token: abc }

View File

@ -0,0 +1 @@
98c920972b2f5e8563540e805d87e6a3bc888972

View File

@ -1 +0,0 @@
1e28b448387ec05d655f8c81ee54e13ff2975a4d

View File

@ -198,6 +198,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
getResponse = client().admin().cluster().prepareGetStoredScript("testTemplate").get();
assertNull(getResponse.getSource());
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
}
public void testIndexedTemplate() throws Exception {
@ -267,6 +268,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
.setScript("2").setScriptType(ScriptType.STORED).setScriptParams(templateParams)
.get();
assertHitCount(searchResponse.getResponse(), 1);
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
}
// Relates to #10397
@ -311,6 +313,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
.get();
assertHitCount(searchResponse.getResponse(), 1);
}
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
}
public void testIndexedTemplateWithArray() throws Exception {
@ -339,6 +342,7 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
.setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams)
.get();
assertHitCount(searchResponse.getResponse(), 5);
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
}
}

View File

@ -56,6 +56,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.http.AbstractHttpServerTransport;
import org.elasticsearch.http.BindHttpException;
import org.elasticsearch.http.HttpHandlingSettings;
import org.elasticsearch.http.HttpStats;

View File

@ -273,7 +273,7 @@ public class Netty4HttpServerTransportTests extends ESTestCase {
try (Netty4HttpServerTransport transport =
new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry(), dispatcher)) {
transport.start();
final TransportAddress remoteAddress = randomFrom(transport.boundAddress.boundAddresses());
final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
try (Netty4HttpClient client = new Netty4HttpClient()) {
final String url = "/" + new String(new byte[maxInitialLineLength], Charset.forName("UTF-8"));
@ -352,7 +352,7 @@ public class Netty4HttpServerTransportTests extends ESTestCase {
try (Netty4HttpServerTransport transport =
new Netty4HttpServerTransport(settings, networkService, bigArrays, threadPool, xContentRegistry(), dispatcher)) {
transport.start();
final TransportAddress remoteAddress = randomFrom(transport.boundAddress.boundAddresses());
final TransportAddress remoteAddress = randomFrom(transport.boundAddress().boundAddresses());
AtomicBoolean channelClosed = new AtomicBoolean(false);

View File

@ -0,0 +1 @@
844e2b76f4bc6e646e1c3257d668ac598e03f36a

View File

@ -1 +0,0 @@
452c9a9f86b79b9b3eaa7d6aa782e189d5bcfe8f

View File

@ -16,9 +16,11 @@
body:
filter: [icu_normalizer]
text: Foo Bar Ruß
tokenizer: keyword
- length: { tokens: 1 }
- match: { tokens.0.token: foo bar russ }
tokenizer: standard
- length: { tokens: 3 }
- match: { tokens.0.token: foo}
- match: { tokens.1.token: bar }
- match: { tokens.2.token: russ }
---
"Normalization charfilter":
- do:
@ -26,9 +28,11 @@
body:
char_filter: [icu_normalizer]
text: Foo Bar Ruß
tokenizer: keyword
- length: { tokens: 1 }
- match: { tokens.0.token: foo bar russ }
tokenizer: standard
- length: { tokens: 3 }
- match: { tokens.0.token: foo }
- match: { tokens.1.token: bar }
- match: { tokens.2.token: russ }
---
"Folding filter":
- do:
@ -36,9 +40,11 @@
body:
filter: [icu_folding]
text: Foo Bar résumé
tokenizer: keyword
- length: { tokens: 1 }
- match: { tokens.0.token: foo bar resume }
tokenizer: standard
- length: { tokens: 3 }
- match: { tokens.0.token: foo }
- match: { tokens.1.token: bar }
- match: { tokens.2.token: resume }
---
"Normalization with a UnicodeSet Filter":
- do:
@ -64,25 +70,34 @@
index: test
body:
char_filter: ["charfilter_icu_normalizer"]
tokenizer: keyword
tokenizer: standard
text: charfilter Föo Bâr Ruß
- length: { tokens: 1 }
- match: { tokens.0.token: charfilter föo bâr ruß }
- length: { tokens: 4 }
- match: { tokens.0.token: charfilter }
- match: { tokens.1.token: föo }
- match: { tokens.2.token: bâr }
- match: { tokens.3.token: ruß }
- do:
indices.analyze:
index: test
body:
tokenizer: keyword
tokenizer: standard
filter: ["tokenfilter_icu_normalizer"]
text: tokenfilter Föo Bâr Ruß
- length: { tokens: 1 }
- match: { tokens.0.token: tokenfilter föo Bâr ruß }
- length: { tokens: 4 }
- match: { tokens.0.token: tokenfilter }
- match: { tokens.1.token: föo }
- match: { tokens.2.token: Bâr }
- match: { tokens.3.token: ruß }
- do:
indices.analyze:
index: test
body:
tokenizer: keyword
tokenizer: standard
filter: ["tokenfilter_icu_folding"]
text: icufolding Föo Bâr Ruß
- length: { tokens: 1 }
- match: { tokens.0.token: icufolding foo bâr russ }
- length: { tokens: 4 }
- match: { tokens.0.token: icufolding }
- match: { tokens.1.token: foo }
- match: { tokens.2.token: bâr }
- match: { tokens.3.token: russ }

View File

@ -0,0 +1 @@
2f2bd2d67c7952e4ae14ab3f742824a45d0d1719

View File

@ -1 +0,0 @@
48c76a922bdfc7f50b1b6fe22e9456c555f3f990

View File

@ -0,0 +1 @@
46ad7ebcfcdbdb60dd54aae4d720356a7a51c7c0

View File

@ -1 +0,0 @@
4db5777df468b0867ff6539c9ab687e0ed6cab41

View File

@ -0,0 +1 @@
548e9f2b4d4a985dc174b2eee4007c0bd5642e68

View File

@ -1 +0,0 @@
0e09e6b011ab2b1a0e3e0e1df2ab2a91dca8ba23

View File

@ -0,0 +1 @@
b90e66f4104f0234cfef335762f65a6fed695231

View File

@ -1 +0,0 @@
ceefa0f9789ab9ea5c8ab9f67ed7a601a3ae6aa9

View File

@ -0,0 +1 @@
929a4eb52b11f6d3f0df9c8eba014f5ee2464c67

View File

@ -1 +0,0 @@
b013adc183e52a74795ad3d3032f4d0f9db30b73

View File

@ -5,7 +5,7 @@
indices.analyze:
body:
text: studenci
tokenizer: keyword
tokenizer: standard
filter: [polish_stem]
- length: { tokens: 1 }
- match: { tokens.0.token: student }

View File

@ -0,0 +1 @@
0e6575a411b65cd95e0e54f04d3da278b68be521

View File

@ -1 +0,0 @@
95300f29418f60e57e022d934d3462be9e1e2225

View File

@ -35,6 +35,7 @@ import com.microsoft.azure.storage.blob.DeleteSnapshotsOption;
import com.microsoft.azure.storage.blob.ListBlobItem;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.apache.logging.log4j.util.Supplier;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.support.PlainBlobMetaData;
import org.elasticsearch.common.collect.MapBuilder;
@ -45,6 +46,7 @@ import org.elasticsearch.repositories.RepositoryException;
import java.io.InputStream;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.Map;
@ -52,66 +54,59 @@ import java.util.Map;
public class AzureStorageServiceImpl extends AbstractComponent implements AzureStorageService {
final Map<String, AzureStorageSettings> storageSettings;
final Map<String, CloudBlobClient> clients = new HashMap<>();
final Map<String, CloudBlobClient> clients;
public AzureStorageServiceImpl(Settings settings, Map<String, AzureStorageSettings> storageSettings) {
super(settings);
this.storageSettings = storageSettings;
if (storageSettings.isEmpty()) {
// If someone did not register any settings, they basically can't use the plugin
throw new IllegalArgumentException("If you want to use an azure repository, you need to define a client configuration.");
}
logger.debug("starting azure storage client instance");
// We register all regular azure clients
for (Map.Entry<String, AzureStorageSettings> azureStorageSettingsEntry : this.storageSettings.entrySet()) {
logger.debug("registering regular client for account [{}]", azureStorageSettingsEntry.getKey());
createClient(azureStorageSettingsEntry.getValue());
}
this.storageSettings = storageSettings;
this.clients = createClients(storageSettings);
}
void createClient(AzureStorageSettings azureStorageSettings) {
try {
logger.trace("creating new Azure storage client using account [{}], key [{}], endpoint suffix [{}]",
azureStorageSettings.getAccount(), azureStorageSettings.getKey(), azureStorageSettings.getEndpointSuffix());
private Map<String, CloudBlobClient> createClients(final Map<String, AzureStorageSettings> storageSettings) {
final Map<String, CloudBlobClient> clients = new HashMap<>();
for (Map.Entry<String, AzureStorageSettings> azureStorageEntry : storageSettings.entrySet()) {
final String clientName = azureStorageEntry.getKey();
final AzureStorageSettings clientSettings = azureStorageEntry.getValue();
try {
logger.trace("creating new Azure storage client with name [{}]", clientName);
String storageConnectionString =
"DefaultEndpointsProtocol=https;"
+ "AccountName=" + clientSettings.getAccount() + ";"
+ "AccountKey=" + clientSettings.getKey();
String storageConnectionString =
"DefaultEndpointsProtocol=https;"
+ "AccountName=" + azureStorageSettings.getAccount() + ";"
+ "AccountKey=" + azureStorageSettings.getKey();
final String endpointSuffix = clientSettings.getEndpointSuffix();
if (Strings.hasLength(endpointSuffix)) {
storageConnectionString += ";EndpointSuffix=" + endpointSuffix;
}
// Retrieve storage account from connection-string.
CloudStorageAccount storageAccount = CloudStorageAccount.parse(storageConnectionString);
String endpointSuffix = azureStorageSettings.getEndpointSuffix();
if (endpointSuffix != null && !endpointSuffix.isEmpty()) {
storageConnectionString += ";EndpointSuffix=" + endpointSuffix;
// Create the blob client.
CloudBlobClient client = storageAccount.createCloudBlobClient();
// Register the client
clients.put(clientSettings.getAccount(), client);
} catch (Exception e) {
logger.error(() -> new ParameterizedMessage("Can not create azure storage client [{}]", clientName), e);
}
// Retrieve storage account from connection-string.
CloudStorageAccount storageAccount = CloudStorageAccount.parse(storageConnectionString);
// Create the blob client.
CloudBlobClient client = storageAccount.createCloudBlobClient();
// Register the client
this.clients.put(azureStorageSettings.getAccount(), client);
} catch (Exception e) {
logger.error("can not create azure storage client: {}", e.getMessage());
}
return Collections.unmodifiableMap(clients);
}
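For illustration, given a hypothetical client configured with account `myaccount1`, key `<base64-key>`, and endpoint suffix `core.windows.net`, the connection string assembled above would read:

DefaultEndpointsProtocol=https;AccountName=myaccount1;AccountKey=<base64-key>;EndpointSuffix=core.windows.net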
CloudBlobClient getSelectedClient(String clientName, LocationMode mode) {
logger.trace("selecting a client named [{}], mode [{}]", clientName, mode.name());
AzureStorageSettings azureStorageSettings = this.storageSettings.get(clientName);
if (azureStorageSettings == null) {
throw new IllegalArgumentException("Can not find named azure client [" + clientName + "]. Check your settings.");
throw new IllegalArgumentException("Unable to find client with name [" + clientName + "]");
}
CloudBlobClient client = this.clients.get(azureStorageSettings.getAccount());
if (client == null) {
throw new IllegalArgumentException("Can not find an azure client named [" + azureStorageSettings.getAccount() + "]");
throw new IllegalArgumentException("No account defined for client with name [" + clientName + "]");
}
// NOTE: for now, just set the location mode in case it is different;

View File

@ -23,7 +23,6 @@ import com.microsoft.azure.storage.LocationMode;
import com.microsoft.azure.storage.RetryExponentialRetry;
import com.microsoft.azure.storage.blob.CloudBlobClient;
import com.microsoft.azure.storage.core.Base64;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.SettingsException;
@ -36,6 +35,7 @@ import java.net.URI;
import java.net.URISyntaxException;
import java.net.UnknownHostException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.Map;
import static org.elasticsearch.repositories.azure.AzureStorageServiceImpl.blobNameFromUri;
@ -49,31 +49,14 @@ import static org.hamcrest.Matchers.nullValue;
public class AzureStorageServiceTests extends ESTestCase {
private MockSecureSettings buildSecureSettings() {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("azure.client.azure1.account", "myaccount1");
secureSettings.setString("azure.client.azure1.key", "mykey1");
secureSettings.setString("azure.client.azure2.account", "myaccount2");
secureSettings.setString("azure.client.azure2.key", "mykey2");
secureSettings.setString("azure.client.azure3.account", "myaccount3");
secureSettings.setString("azure.client.azure3.key", "mykey3");
return secureSettings;
}
private Settings buildSettings() {
Settings settings = Settings.builder()
.setSecureSettings(buildSecureSettings())
.build();
return settings;
}
public void testReadSecuredSettings() {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("azure.client.azure1.account", "myaccount1");
secureSettings.setString("azure.client.azure1.key", "mykey1");
secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1"));
secureSettings.setString("azure.client.azure2.account", "myaccount2");
secureSettings.setString("azure.client.azure2.key", "mykey2");
secureSettings.setString("azure.client.azure2.key", encodeKey("mykey2"));
secureSettings.setString("azure.client.azure3.account", "myaccount3");
secureSettings.setString("azure.client.azure3.key", "mykey3");
secureSettings.setString("azure.client.azure3.key", encodeKey("mykey3"));
Settings settings = Settings.builder().setSecureSettings(secureSettings)
.put("azure.client.azure3.endpoint_suffix", "my_endpoint_suffix").build();
@ -88,9 +71,9 @@ public class AzureStorageServiceTests extends ESTestCase {
public void testCreateClientWithEndpointSuffix() {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("azure.client.azure1.account", "myaccount1");
secureSettings.setString("azure.client.azure1.key", Base64.encode("mykey1".getBytes(StandardCharsets.UTF_8)));
secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1"));
secureSettings.setString("azure.client.azure2.account", "myaccount2");
secureSettings.setString("azure.client.azure2.key", Base64.encode("mykey2".getBytes(StandardCharsets.UTF_8)));
secureSettings.setString("azure.client.azure2.key", encodeKey("mykey2"));
Settings settings = Settings.builder().setSecureSettings(secureSettings)
.put("azure.client.azure1.endpoint_suffix", "my_endpoint_suffix").build();
AzureStorageServiceImpl azureStorageService = new AzureStorageServiceImpl(settings, AzureStorageSettings.load(settings));
@ -103,7 +86,7 @@ public class AzureStorageServiceTests extends ESTestCase {
public void testGetSelectedClientWithNoPrimaryAndSecondary() {
try {
new AzureStorageServiceMockForSettings(Settings.EMPTY);
new AzureStorageServiceImpl(Settings.EMPTY, Collections.emptyMap());
fail("we should have raised an IllegalArgumentException");
} catch (IllegalArgumentException e) {
assertThat(e.getMessage(), is("If you want to use an azure repository, you need to define a client configuration."));
@ -111,11 +94,11 @@ public class AzureStorageServiceTests extends ESTestCase {
}
public void testGetSelectedClientNonExisting() {
AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMockForSettings(buildSettings());
AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> {
azureStorageService.getSelectedClient("azure4", LocationMode.PRIMARY_ONLY);
});
assertThat(e.getMessage(), is("Can not find named azure client [azure4]. Check your settings."));
assertThat(e.getMessage(), is("Unable to find client with name [azure4]"));
}
public void testGetSelectedClientDefaultTimeout() {
@ -123,7 +106,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.setSecureSettings(buildSecureSettings())
.put("azure.client.azure3.timeout", "30s")
.build();
AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMockForSettings(timeoutSettings);
AzureStorageServiceImpl azureStorageService = createAzureService(timeoutSettings);
CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), nullValue());
CloudBlobClient client3 = azureStorageService.getSelectedClient("azure3", LocationMode.PRIMARY_ONLY);
@ -131,13 +114,13 @@ public class AzureStorageServiceTests extends ESTestCase {
}
public void testGetSelectedClientNoTimeout() {
AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMockForSettings(buildSettings());
AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings());
CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
assertThat(client1.getDefaultRequestOptions().getTimeoutIntervalInMs(), is(nullValue()));
}
public void testGetSelectedClientBackoffPolicy() {
AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMockForSettings(buildSettings());
AzureStorageServiceImpl azureStorageService = createAzureService(buildSettings());
CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue()));
assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class));
@ -149,7 +132,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure1.max_retries", 7)
.build();
AzureStorageServiceImpl azureStorageService = new AzureStorageServiceMockForSettings(timeoutSettings);
AzureStorageServiceImpl azureStorageService = createAzureService(timeoutSettings);
CloudBlobClient client1 = azureStorageService.getSelectedClient("azure1", LocationMode.PRIMARY_ONLY);
assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), is(notNullValue()));
assertThat(client1.getDefaultRequestOptions().getRetryPolicyFactory(), instanceOf(RetryExponentialRetry.class));
@ -159,7 +142,7 @@ public class AzureStorageServiceTests extends ESTestCase {
Settings settings = Settings.builder()
.setSecureSettings(buildSecureSettings())
.build();
AzureStorageServiceMockForSettings mock = new AzureStorageServiceMockForSettings(settings);
AzureStorageServiceImpl mock = createAzureService(settings);
assertThat(mock.storageSettings.get("azure1").getProxy(), nullValue());
assertThat(mock.storageSettings.get("azure2").getProxy(), nullValue());
assertThat(mock.storageSettings.get("azure3").getProxy(), nullValue());
@ -172,7 +155,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure1.proxy.port", 8080)
.put("azure.client.azure1.proxy.type", "http")
.build();
AzureStorageServiceMockForSettings mock = new AzureStorageServiceMockForSettings(settings);
AzureStorageServiceImpl mock = createAzureService(settings);
Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy();
assertThat(azure1Proxy, notNullValue());
@ -192,7 +175,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure2.proxy.port", 8081)
.put("azure.client.azure2.proxy.type", "http")
.build();
AzureStorageServiceMockForSettings mock = new AzureStorageServiceMockForSettings(settings);
AzureStorageServiceImpl mock = createAzureService(settings);
Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy();
assertThat(azure1Proxy, notNullValue());
assertThat(azure1Proxy.type(), is(Proxy.Type.HTTP));
@ -211,7 +194,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure1.proxy.port", 8080)
.put("azure.client.azure1.proxy.type", "socks")
.build();
AzureStorageServiceMockForSettings mock = new AzureStorageServiceMockForSettings(settings);
AzureStorageServiceImpl mock = createAzureService(settings);
Proxy azure1Proxy = mock.storageSettings.get("azure1").getProxy();
assertThat(azure1Proxy, notNullValue());
assertThat(azure1Proxy.type(), is(Proxy.Type.SOCKS));
@ -227,7 +210,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure1.proxy.type", randomFrom("socks", "http"))
.build();
SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceMockForSettings(settings));
SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings));
assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage());
}
@ -238,7 +221,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure1.proxy.type", randomFrom("socks", "http"))
.build();
SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceMockForSettings(settings));
SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings));
assertEquals("Azure Proxy type has been set but proxy host or port is not defined.", e.getMessage());
}
@ -249,7 +232,7 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure1.proxy.port", 8080)
.build();
SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceMockForSettings(settings));
SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings));
assertEquals("Azure Proxy port or host have been set but proxy type is not defined.", e.getMessage());
}
@ -261,26 +244,10 @@ public class AzureStorageServiceTests extends ESTestCase {
.put("azure.client.azure1.proxy.port", 8080)
.build();
SettingsException e = expectThrows(SettingsException.class, () -> new AzureStorageServiceMockForSettings(settings));
SettingsException e = expectThrows(SettingsException.class, () -> createAzureService(settings));
assertEquals("Azure proxy host is unknown.", e.getMessage());
}
/**
* This internal class just overloads the createClient method, which is called by AzureStorageServiceImpl.doStart()
*/
class AzureStorageServiceMockForSettings extends AzureStorageServiceImpl {
AzureStorageServiceMockForSettings(Settings settings) {
super(settings, AzureStorageSettings.load(settings));
}
// We fake the client here
@Override
void createClient(AzureStorageSettings azureStorageSettings) {
this.clients.put(azureStorageSettings.getAccount(),
new CloudBlobClient(URI.create("https://" + azureStorageSettings.getAccount())));
}
}
public void testBlobNameFromUri() throws URISyntaxException {
String name = blobNameFromUri(new URI("https://myservice.azure.net/container/path/to/myfile"));
assertThat(name, is("path/to/myfile"));
@ -291,4 +258,27 @@ public class AzureStorageServiceTests extends ESTestCase {
name = blobNameFromUri(new URI("https://127.0.0.1/container/path/to/myfile"));
assertThat(name, is("path/to/myfile"));
}
private static MockSecureSettings buildSecureSettings() {
MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("azure.client.azure1.account", "myaccount1");
secureSettings.setString("azure.client.azure1.key", encodeKey("mykey1"));
secureSettings.setString("azure.client.azure2.account", "myaccount2");
secureSettings.setString("azure.client.azure2.key", encodeKey("mykey2"));
secureSettings.setString("azure.client.azure3.account", "myaccount3");
secureSettings.setString("azure.client.azure3.key", encodeKey("mykey3"));
return secureSettings;
}
private static Settings buildSettings() {
return Settings.builder().setSecureSettings(buildSecureSettings()).build();
}
private static AzureStorageServiceImpl createAzureService(final Settings settings) {
return new AzureStorageServiceImpl(settings, AzureStorageSettings.load(settings));
}
private static String encodeKey(final String value) {
return Base64.encode(value.getBytes(StandardCharsets.UTF_8));
}
}

View File

@ -42,7 +42,7 @@ import org.elasticsearch.http.BindHttpException;
import org.elasticsearch.http.HttpHandlingSettings;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.http.HttpStats;
import org.elasticsearch.http.netty4.AbstractHttpServerTransport;
import org.elasticsearch.http.AbstractHttpServerTransport;
import org.elasticsearch.nio.AcceptingSelector;
import org.elasticsearch.nio.AcceptorEventHandler;
import org.elasticsearch.nio.BytesChannelContext;

View File

@ -68,6 +68,7 @@ setup() {
@test "[TAR] install archive" {
# Install the archive
install_archive
set_debug_logging
count=$(find /tmp -type d -name 'elasticsearch*' | wc -l)
[ "$count" -eq 1 ]

View File

@ -61,6 +61,7 @@ setup() {
[ ! -d "$ESHOME" ]; then
clean_before_test
install
set_debug_logging
fi
}

View File

@ -461,6 +461,13 @@ debug_collect_logs() {
describe_port 127.0.0.1 9201
}
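# Presumably intended to diagnose failures seen on SUSE hosts: raise indices/gateway
# logging before the node starts so failed runs leave more detail in the logs.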
set_debug_logging() {
if [ "$ESCONFIG" ] && [ -d "$ESCONFIG" ] && [ -f /etc/os-release ] && (grep -qi suse /etc/os-release); then
echo 'logger.org.elasticsearch.indices: DEBUG' >> "$ESCONFIG/elasticsearch.yml"
echo 'logger.org.elasticsearch.gateway: TRACE' >> "$ESCONFIG/elasticsearch.yml"
fi
}
# Waits for Elasticsearch to reach some status.
# $1 - expected status - defaults to green
wait_for_elasticsearch_status() {

View File

@ -75,19 +75,3 @@
- match: { detail.tokenizer.tokens.2.token: buzz }
- match: { detail.tokenfilters.0.name: "_anonymous_tokenfilter" }
- match: { detail.tokenfilters.0.tokens.0.token: bar }
---
"Custom normalizer in request":
- do:
indices.analyze:
body:
text: ABc
explain: true
filter: ["lowercase"]
- length: { detail.tokenizer.tokens: 1 }
- length: { detail.tokenfilters.0.tokens: 1 }
- match: { detail.tokenizer.name: keyword_for_normalizer }
- match: { detail.tokenizer.tokens.0.token: ABc }
- match: { detail.tokenfilters.0.name: lowercase }
- match: { detail.tokenfilters.0.tokens.0.token: abc }

View File

@ -0,0 +1 @@
0f75703c30756c31f7d09ec79191dab6fb35c958

View File

@ -1 +0,0 @@
96ab108569c77932ecb17c45421affece207df5c

View File

@ -0,0 +1 @@
c5c519fdea65726612f79e3dd942b7316966646e

View File

@ -1 +0,0 @@
72d09ca50979f716a57f53f2de33d55023a166ec

View File

@ -0,0 +1 @@
f345b6aa3c550dafc63de3e5a5c404691e782336

View File

@ -1 +0,0 @@
e118e4d05070378516b9055184b74498ba528dee

View File

@ -0,0 +1 @@
7a74855e37124a27af36390c9d15abe33614129e

View File

@ -1 +0,0 @@
2b2ea6bfe6fa159bbf205bf7f7fa2ed2c22bbffc

View File

@ -0,0 +1 @@
0e3df4b469465ef101254fdcbb08ebd8a19f1f9d

View File

@ -1 +0,0 @@
423e4fff9276101d845d6073dc6cd27504def207

View File

@ -0,0 +1 @@
05d236149c99c860e6b627a8f78ea32918c108c3

View File

@ -1 +0,0 @@
27561038da2edcae3ecc3a08b0a52824966af87a

View File

@ -0,0 +1 @@
d83e7e65eb268425f7bd5be2425d4a00b556bc47

View File

@ -1 +0,0 @@
d7d422159f705261784d121e24877119d9c95083

View File

@ -0,0 +1 @@
440a998b5bf99871bec4272a219de01b25751d5c

View File

@ -1 +0,0 @@
fc09508fde6ba87f241d7e3148d9e310c0db9cb9

View File

@ -0,0 +1 @@
2a5c031155fdfa743af321150c0dd654a6ea3c71

View File

@ -1 +0,0 @@
201fdf3432ff3fef0f48c38c2c0f482c144f6868

View File

@ -0,0 +1 @@
d021c9a461ff0f020d038ad5ecc5127973d4674a

View File

@ -1 +0,0 @@
917df8c8d08952a012a34050b183b6204ae7081b

View File

@ -0,0 +1 @@
9877a14c53e69b39fff2bf10d49a61171746d940

View File

@ -1 +0,0 @@
caff84fa66cb0376835c39f3d4ca7dfd2177d8f4

View File

@ -0,0 +1 @@
7d7e5101b46a120efa311509948c0d1f9bf30155

View File

@ -1 +0,0 @@
e1bce61a9d9129a8d0fdd3127a84665d29f53eb0

View File

@ -0,0 +1 @@
5a4c11db96ae70b9048243cc530fcbc76faa0978

View File

@ -1 +0,0 @@
3a2e4373d79fda968a078971efa2cb8ec9ff65b0

View File

@ -0,0 +1 @@
afb01af1450067b145ca2c1d737b5907288af560

View File

@ -1 +0,0 @@
7f14927e5c3c1c85c4c5b3681c28c5e36f241dda

View File

@ -0,0 +1 @@
473f0221e0b2ea45940d8ae6dcf16e39c81b18c2

View File

@ -1 +0,0 @@
6e708a38c957a655e0cfedb06a1b9aa892929db0

View File

@ -270,7 +270,8 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
public static class Builder {
private static final Set<String> VALID_FIELDS = Sets.newHashSet("template", "order", "mappings", "settings", "index_patterns");
private static final Set<String> VALID_FIELDS = Sets.newHashSet(
"template", "order", "mappings", "settings", "index_patterns", "aliases", "version");
static {
VALID_FIELDS.addAll(IndexMetaData.customPrototypes.keySet());
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.http.netty4;
package org.elasticsearch.http;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.IntSet;
@ -30,8 +30,6 @@ import org.elasticsearch.common.transport.PortsRange;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.http.BindHttpException;
import org.elasticsearch.http.HttpInfo;
import org.elasticsearch.rest.RestChannel;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.threadpool.ThreadPool;

View File

@ -548,6 +548,10 @@ public final class AnalysisRegistry implements Closeable {
TokenizerFactory keywordTokenizerFactory,
Map<String, TokenFilterFactory> tokenFilters,
Map<String, CharFilterFactory> charFilters) {
if (keywordTokenizerFactory == null) {
throw new IllegalStateException("keyword tokenizer factory is null, normalizers require analysis-common module");
}
if (normalizerFactory instanceof CustomNormalizerProvider) {
((CustomNormalizerProvider) normalizerFactory).build(keywordTokenizerFactory, charFilters, tokenFilters);
}

View File

@ -83,7 +83,7 @@ public final class ElasticsearchMergePolicy extends FilterMergePolicy {
@Override
public MergeSpecification findForcedMerges(SegmentInfos segmentInfos,
int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, IndexWriter writer)
int maxSegmentCount, Map<SegmentCommitInfo,Boolean> segmentsToMerge, MergeContext mergeContext)
throws IOException {
if (upgradeInProgress) {
@ -122,7 +122,7 @@ public final class ElasticsearchMergePolicy extends FilterMergePolicy {
// has a chance to decide what to do (e.g. collapse the segments to satisfy maxSegmentCount)
}
return super.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, writer);
return super.findForcedMerges(segmentInfos, maxSegmentCount, segmentsToMerge, mergeContext);
}
/**

View File

@ -56,7 +56,6 @@ import org.elasticsearch.index.analysis.IndonesianAnalyzerProvider;
import org.elasticsearch.index.analysis.IrishAnalyzerProvider;
import org.elasticsearch.index.analysis.ItalianAnalyzerProvider;
import org.elasticsearch.index.analysis.KeywordAnalyzerProvider;
import org.elasticsearch.index.analysis.KeywordTokenizerFactory;
import org.elasticsearch.index.analysis.LatvianAnalyzerProvider;
import org.elasticsearch.index.analysis.LithuanianAnalyzerProvider;
import org.elasticsearch.index.analysis.NorwegianAnalyzerProvider;
@ -225,7 +224,6 @@ public final class AnalysisModule {
private NamedRegistry<AnalysisProvider<TokenizerFactory>> setupTokenizers(List<AnalysisPlugin> plugins) {
NamedRegistry<AnalysisProvider<TokenizerFactory>> tokenizers = new NamedRegistry<>("tokenizer");
tokenizers.register("standard", StandardTokenizerFactory::new);
tokenizers.register("keyword", KeywordTokenizerFactory::new);
tokenizers.extractAndRegister(plugins, AnalysisPlugin::getTokenizers);
return tokenizers;
}

View File

@ -74,6 +74,11 @@ public class StoredScriptSource extends AbstractDiffable<StoredScriptSource> imp
*/
public static final ParseField TEMPLATE_PARSE_FIELD = new ParseField("template");
/**
* Standard {@link ParseField} for query on the inner field.
*/
public static final ParseField TEMPLATE_NO_WRAPPER_PARSE_FIELD = new ParseField("query");
/**
* Standard {@link ParseField} for lang on the inner level.
*/
@ -189,6 +194,26 @@ public class StoredScriptSource extends AbstractDiffable<StoredScriptSource> imp
PARSER.declareField(Builder::setOptions, XContentParser::mapStrings, OPTIONS_PARSE_FIELD, ValueType.OBJECT);
}
private static StoredScriptSource parseRemaining(Token token, XContentParser parser) throws IOException {
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
if (token != Token.START_OBJECT) {
builder.startObject();
builder.copyCurrentStructure(parser);
builder.endObject();
} else {
builder.copyCurrentStructure(parser);
}
String source = Strings.toString(builder);
if (source == null || source.isEmpty()) {
DEPRECATION_LOGGER.deprecated("empty templates should no longer be used");
}
return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, source, Collections.emptyMap());
}
}
/**
* This will parse XContent into a {@link StoredScriptSource}. The following formats can be parsed:
*
@ -304,38 +329,28 @@ public class StoredScriptSource extends AbstractDiffable<StoredScriptSource> imp
} else {
throw new ParsingException(parser.getTokenLocation(), "unexpected token [" + token + "], expected [{, <source>]");
}
} else {
if (TEMPLATE_PARSE_FIELD.getPreferredName().equals(name)) {
token = parser.nextToken();
} else if (TEMPLATE_PARSE_FIELD.getPreferredName().equals(name)) {
if (token == Token.VALUE_STRING) {
String source = parser.text();
DEPRECATION_LOGGER.deprecated("the template context is now deprecated. Specify templates in a \"script\" element.");
if (source == null || source.isEmpty()) {
DEPRECATION_LOGGER.deprecated("empty templates should no longer be used");
}
return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, source, Collections.emptyMap());
}
}
try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
if (token != Token.START_OBJECT) {
builder.startObject();
builder.copyCurrentStructure(parser);
builder.endObject();
} else {
builder.copyCurrentStructure(parser);
}
String source = Strings.toString(builder);
token = parser.nextToken();
if (token == Token.VALUE_STRING) {
String source = parser.text();
if (source == null || source.isEmpty()) {
DEPRECATION_LOGGER.deprecated("empty templates should no longer be used");
}
return new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, source, Collections.emptyMap());
} else {
return parseRemaining(token, parser);
}
} else if (TEMPLATE_NO_WRAPPER_PARSE_FIELD.getPreferredName().equals(name)) {
DEPRECATION_LOGGER.deprecated("the template context is now deprecated. Specify templates in a \"script\" element.");
return parseRemaining(token, parser);
} else {
DEPRECATION_LOGGER.deprecated("scripts should not be stored without a context. Specify them in a \"script\" element.");
return parseRemaining(token, parser);
}
} catch (IOException ioe) {
throw new UncheckedIOException(ioe);
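To make the new parsing branches concrete, a small sketch (the JSON literal is illustrative; the parse call and the warning text match the tests further down in this diff):

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.script.StoredScriptSource;

class StoredScriptParseSketch {
    static StoredScriptSource parseDeprecatedQueryWrapper() {
        // Takes the TEMPLATE_NO_WRAPPER_PARSE_FIELD ("query") branch above and warns:
        // "the template context is now deprecated. Specify templates in a \"script\" element."
        return StoredScriptSource.parse(
                new BytesArray("{\"query\": {\"match_all\": {}}}"), XContentType.JSON);
    }
}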

View File

@ -19,6 +19,7 @@
package org.elasticsearch.action.admin.indices;
import org.apache.lucene.analysis.MockTokenFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
@ -37,6 +38,7 @@ import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.index.analysis.IndexAnalyzers;
import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter;
@ -107,6 +109,12 @@ public class TransportAnalyzeActionTests extends ESTestCase {
return singletonMap("append", AppendCharFilterFactory::new);
}
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("keyword", (indexSettings, environment, name, settings) ->
() -> new MockTokenizer(MockTokenizer.KEYWORD, false));
}
@Override
public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
return singletonMap("mock", MockFactory::new);

View File

@ -37,10 +37,13 @@ import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.engine.VersionConflictEngineException;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.MockKeywordPlugin;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
@ -58,6 +61,12 @@ import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(MockKeywordPlugin.class);
}
public void testNoSuchDoc() throws Exception {
XContentBuilder mapping = jsonBuilder().startObject().startObject("type1")
.startObject("properties")

View File

@ -43,6 +43,7 @@ import java.util.Collections;
import static java.util.Collections.singletonMap;
import static org.elasticsearch.cluster.metadata.AliasMetaData.newAliasMetaDataBuilder;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.Matchers.contains;
public class IndexTemplateMetaDataTests extends ESTestCase {
@ -167,4 +168,54 @@ public class IndexTemplateMetaDataTests extends ESTestCase {
assertThat(ex.getMessage(), equalTo("Index patterns must not be null or empty; got null"));
}
}
public void testParseTemplateWithAliases() throws Exception {
String templateInJSON = "{\"aliases\": {\"log\":{}}, \"index_patterns\": [\"pattern-1\"]}";
try (XContentParser parser =
XContentHelper.createParser(NamedXContentRegistry.EMPTY,
DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new BytesArray(templateInJSON), XContentType.JSON)) {
IndexTemplateMetaData template = IndexTemplateMetaData.Builder.fromXContent(parser, randomAlphaOfLengthBetween(1, 100));
assertThat(template.aliases().containsKey("log"), equalTo(true));
assertThat(template.patterns(), contains("pattern-1"));
}
}
public void testFromToXContent() throws Exception {
String templateName = randomUnicodeOfCodepointLengthBetween(1, 10);
IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder(templateName);
templateBuilder.patterns(Arrays.asList("pattern-1"));
int numAlias = between(0, 5);
for (int i = 0; i < numAlias; i++) {
AliasMetaData.Builder alias = AliasMetaData.builder(randomRealisticUnicodeOfLengthBetween(1, 100));
if (randomBoolean()) {
alias.indexRouting(randomRealisticUnicodeOfLengthBetween(1, 100));
}
if (randomBoolean()) {
alias.searchRouting(randomRealisticUnicodeOfLengthBetween(1, 100));
}
templateBuilder.putAlias(alias);
}
if (randomBoolean()) {
templateBuilder.settings(Settings.builder().put("index.setting-1", randomLong()));
templateBuilder.settings(Settings.builder().put("index.setting-2", randomTimeValue()));
}
if (randomBoolean()) {
templateBuilder.order(randomInt());
}
if (randomBoolean()) {
templateBuilder.version(between(0, 100));
}
if (randomBoolean()) {
templateBuilder.putMapping("doc", "{\"doc\":{\"properties\":{\"type\":\"text\"}}}");
}
IndexTemplateMetaData template = templateBuilder.build();
XContentBuilder builder = XContentBuilder.builder(randomFrom(XContentType.JSON.xContent()));
builder.startObject();
IndexTemplateMetaData.Builder.toXContent(template, builder, ToXContent.EMPTY_PARAMS);
builder.endObject();
try (XContentParser parser = createParser(shuffleXContent(builder))) {
IndexTemplateMetaData parsed = IndexTemplateMetaData.Builder.fromXContent(parser, templateName);
assertThat(parsed, equalTo(template));
}
}
}

View File

@ -432,7 +432,7 @@ public class GatewayIndexStateIT extends ESIntegTestCase {
logger.info("--> starting one node");
internalCluster().startNode();
prepareCreate("test").setSettings(Settings.builder()
.put("index.analysis.analyzer.test.tokenizer", "keyword")
.put("index.analysis.analyzer.test.tokenizer", "standard")
.put("index.number_of_shards", "1"))
.addMapping("type1", "{\n" +
" \"type1\": {\n" +

View File

@ -17,13 +17,11 @@
* under the License.
*/
package org.elasticsearch.http.netty4;
package org.elasticsearch.http;
import org.elasticsearch.common.network.NetworkUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.http.BindHttpException;
import org.elasticsearch.http.HttpTransportSettings;
import org.elasticsearch.test.ESTestCase;
import java.net.UnknownHostException;
@ -32,11 +30,11 @@ import java.util.List;
import static java.net.InetAddress.getByName;
import static java.util.Arrays.asList;
import static org.elasticsearch.http.netty4.Netty4HttpServerTransport.resolvePublishPort;
import static org.elasticsearch.http.AbstractHttpServerTransport.resolvePublishPort;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
public class Netty4HttpPublishPortTests extends ESTestCase {
public class AbstractHttpServerTransportTests extends ESTestCase {
public void testHttpPublishPort() throws Exception {
int boundPort = randomIntBetween(9000, 9100);
@ -88,5 +86,4 @@ public class Netty4HttpPublishPortTests extends ESTestCase {
}
return addresses;
}
}

View File

@ -20,6 +20,8 @@
package org.elasticsearch.index.analysis;
import org.apache.lucene.analysis.MockLowerCaseFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
@ -71,7 +73,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings));
() -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN));
assertEquals("Custom normalizer [my_normalizer] cannot configure a tokenizer", e.getMessage());
}
@ -135,7 +137,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
@Override
public int read(char[] cbuf, int off, int len) throws IOException {
int result = reader.read(cbuf, off, len);
for (int i = off; i < result; i++) {
for (int i = off; i < off + len; i++) {
if (cbuf[i] == 'a') {
cbuf[i] = 'z';
}
@ -157,5 +159,11 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
return new Factory();
});
}
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("keyword", (indexSettings, environment, name, settings) ->
() -> new MockTokenizer(MockTokenizer.KEYWORD, false));
}
}
}

View File

@ -20,6 +20,8 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.analysis.MockLowerCaseFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
@ -33,7 +35,9 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
@ -44,8 +48,10 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import static java.util.Collections.singletonList;
import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@ -58,6 +64,21 @@ public class KeywordFieldMapperTests extends ESSingleNodeTestCase {
public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
return singletonList(PreConfiguredTokenFilter.singleton("mock_other_lowercase", true, MockLowerCaseFilter::new));
}
@Override
public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("keyword", (indexSettings, environment, name, settings) -> {
class Factory implements TokenizerFactory {
@Override
public Tokenizer create() {
return new MockTokenizer(MockTokenizer.KEYWORD, false);
}
}
return new Factory();
});
}
};
@Override

View File

@ -21,6 +21,7 @@ package org.elasticsearch.indices.analysis;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.CharFilter;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.TokenFilter;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.Tokenizer;
@ -31,6 +32,7 @@ import org.apache.lucene.store.Directory;
import org.apache.lucene.store.SimpleFSDirectory;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.env.Environment;
@ -49,6 +51,7 @@ import org.elasticsearch.index.analysis.StandardTokenizerFactory;
import org.elasticsearch.index.analysis.StopTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.index.analysis.MyFilterTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.test.ESTestCase;
@ -60,6 +63,8 @@ import java.io.BufferedWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringReader;
import java.io.UncheckedIOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@ -222,7 +227,7 @@ public class AnalysisModuleTests extends ESTestCase {
public void testUnderscoreInAnalyzerName() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.analyzer._invalid_name.tokenizer", "keyword")
.put("index.analysis.analyzer._invalid_name.tokenizer", "standard")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put(IndexMetaData.SETTING_VERSION_CREATED, "1")
.build();
@ -256,6 +261,13 @@ public class AnalysisModuleTests extends ESTestCase {
(tokenStream, esVersion) -> new AppendCharFilter(tokenStream, esVersion.toString()))
);
}
@Override
public Map<String, AnalysisProvider<TokenizerFactory>> getTokenizers() {
// Need the mock keyword tokenizer here, because alpha/beta versions are broken up by the dash.
return singletonMap("keyword", (indexSettings, environment, name, settings) ->
() -> new MockTokenizer(MockTokenizer.KEYWORD, false));
}
})).getAnalysisRegistry();
Version version = VersionUtils.randomVersion(random());
@ -305,11 +317,11 @@ public class AnalysisModuleTests extends ESTestCase {
Version version = VersionUtils.randomVersion(random());
IndexAnalyzers analyzers = getIndexAnalyzers(registry, Settings.builder()
.put("index.analysis.analyzer.no_version.tokenizer", "keyword")
.put("index.analysis.analyzer.no_version.tokenizer", "standard")
.put("index.analysis.analyzer.no_version.filter", "no_version")
.put("index.analysis.analyzer.lucene_version.tokenizer", "keyword")
.put("index.analysis.analyzer.lucene_version.tokenizer", "standard")
.put("index.analysis.analyzer.lucene_version.filter", "lucene_version")
.put("index.analysis.analyzer.elasticsearch_version.tokenizer", "keyword")
.put("index.analysis.analyzer.elasticsearch_version.tokenizer", "standard")
.put("index.analysis.analyzer.elasticsearch_version.filter", "elasticsearch_version")
.put(IndexMetaData.SETTING_VERSION_CREATED, version)
.build());
@ -425,12 +437,17 @@ public class AnalysisModuleTests extends ESTestCase {
// Simple char filter that appends text to the term
public static class AppendCharFilter extends CharFilter {
private final char[] appendMe;
private int offsetInAppendMe = -1;
static Reader append(Reader input, String appendMe) {
try {
return new StringReader(Streams.copyToString(input) + appendMe);
} catch (IOException e) {
throw new UncheckedIOException(e);
}
}
public AppendCharFilter(Reader input, String appendMe) {
super(input);
this.appendMe = appendMe.toCharArray();
super(append(input, appendMe));
}
@Override
@ -440,24 +457,7 @@ public class AnalysisModuleTests extends ESTestCase {
@Override
public int read(char[] cbuf, int off, int len) throws IOException {
if (offsetInAppendMe < 0) {
int read = input.read(cbuf, off, len);
if (read == len) {
return read;
}
off += read;
len -= read;
int allowedLen = Math.min(len, appendMe.length);
System.arraycopy(appendMe, 0, cbuf, off, allowedLen);
offsetInAppendMe = allowedLen;
return read + allowedLen;
}
if (offsetInAppendMe >= appendMe.length) {
return -1;
}
int allowedLen = Math.max(len, appendMe.length - offsetInAppendMe);
System.arraycopy(appendMe, offsetInAppendMe, cbuf, off, allowedLen);
return allowedLen;
return input.read(cbuf, off, len);
}
}

View File

@ -22,11 +22,18 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.analysis.CharFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.MockKeywordPlugin;
import org.hamcrest.core.IsNull;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
@ -38,6 +45,12 @@ import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.startsWith;
public class AnalyzeActionIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(MockKeywordPlugin.class);
}
public void testSimpleAnalyzerTests() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")));
ensureGreen();

View File

@ -81,10 +81,12 @@ public class ScriptMetaDataTests extends AbstractSerializingTestCase<ScriptMetaD
XContentBuilder sourceBuilder = XContentFactory.jsonBuilder();
sourceBuilder.startObject().startObject("template").field("field", "value").endObject().endObject();
builder.storeScript("template", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()));
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
sourceBuilder = XContentFactory.jsonBuilder();
sourceBuilder.startObject().field("template", "value").endObject();
builder.storeScript("template_field", StoredScriptSource.parse(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType()));
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
sourceBuilder = XContentFactory.jsonBuilder();
sourceBuilder.startObject().startObject("script").field("lang", "_lang").field("source", "_source").endObject().endObject();
@ -99,14 +101,19 @@ public class ScriptMetaDataTests extends AbstractSerializingTestCase<ScriptMetaD
public void testDiff() throws Exception {
ScriptMetaData.Builder builder = new ScriptMetaData.Builder(null);
builder.storeScript("1", StoredScriptSource.parse(new BytesArray("{\"foo\":\"abc\"}"), XContentType.JSON));
assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element.");
builder.storeScript("2", StoredScriptSource.parse(new BytesArray("{\"foo\":\"def\"}"), XContentType.JSON));
assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element.");
builder.storeScript("3", StoredScriptSource.parse(new BytesArray("{\"foo\":\"ghi\"}"), XContentType.JSON));
assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element.");
ScriptMetaData scriptMetaData1 = builder.build();
builder = new ScriptMetaData.Builder(scriptMetaData1);
builder.storeScript("2", StoredScriptSource.parse(new BytesArray("{\"foo\":\"changed\"}"), XContentType.JSON));
assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element.");
builder.deleteScript("3");
builder.storeScript("4", StoredScriptSource.parse(new BytesArray("{\"foo\":\"jkl\"}"), XContentType.JSON));
assertWarnings("scripts should not be stored without a context. Specify them in a \"script\" element.");
ScriptMetaData scriptMetaData2 = builder.build();
ScriptMetaData.ScriptMetadataDiff diff = (ScriptMetaData.ScriptMetadataDiff) scriptMetaData2.diff(scriptMetaData1);

View File

@ -50,7 +50,9 @@ public class StoredScriptSourceTests extends AbstractSerializingTestCase<StoredS
if (randomBoolean()) {
options.put(Script.CONTENT_TYPE_OPTION, xContentType.mediaType());
}
return StoredScriptSource.parse(BytesReference.bytes(template), xContentType);
StoredScriptSource source = StoredScriptSource.parse(BytesReference.bytes(template), xContentType);
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
return source;
} catch (IOException e) {
throw new AssertionError("Failed to create test instance", e);
}

View File

@ -74,6 +74,7 @@ public class StoredScriptTests extends AbstractSerializingTestCase<StoredScriptS
StoredScriptSource source = new StoredScriptSource("mustache", "code", Collections.emptyMap());
assertThat(parsed, equalTo(source));
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
}
// complex template with wrapper template object
@ -89,6 +90,7 @@ public class StoredScriptTests extends AbstractSerializingTestCase<StoredScriptS
StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap());
assertThat(parsed, equalTo(source));
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
}
// complex template with no wrapper object
@ -104,6 +106,7 @@ public class StoredScriptTests extends AbstractSerializingTestCase<StoredScriptS
StoredScriptSource source = new StoredScriptSource("mustache", code, Collections.emptyMap());
assertThat(parsed, equalTo(source));
assertWarnings("the template context is now deprecated. Specify templates in a \"script\" element.");
}
// complex template using script as the field name
@ -223,7 +226,10 @@ public class StoredScriptTests extends AbstractSerializingTestCase<StoredScriptS
StoredScriptSource source = new StoredScriptSource(Script.DEFAULT_TEMPLATE_LANG, "", Collections.emptyMap());
assertThat(parsed, equalTo(source));
assertWarnings("empty templates should no longer be used");
assertWarnings(
"the template context is now deprecated. Specify templates in a \"script\" element.",
"empty templates should no longer be used"
);
}
try (XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON)) {

View File

@ -50,14 +50,15 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder.Field;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.test.MockKeywordPlugin;
import org.hamcrest.Matcher;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
@ -105,7 +106,7 @@ public class HighlighterSearchIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singletonList(InternalSettingsPlugin.class);
return Arrays.asList(InternalSettingsPlugin.class, MockKeywordPlugin.class);
}
public void testHighlightingWithStoredKeyword() throws IOException {

View File

@ -32,15 +32,19 @@ import org.elasticsearch.index.query.MultiMatchQueryBuilder;
import org.elasticsearch.index.query.Operator;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.search.MatchQuery;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.MockKeywordPlugin;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.concurrent.ExecutionException;
@ -72,6 +76,11 @@ import static org.hamcrest.Matchers.lessThan;
public class MultiMatchQueryIT extends ESIntegTestCase {
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(MockKeywordPlugin.class);
}
@Before
public void init() throws Exception {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()

View File

@ -24,7 +24,6 @@ import org.apache.lucene.analysis.util.TokenFilterFactory;
import org.apache.lucene.analysis.util.TokenizerFactory;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.index.analysis.HunspellTokenFilterFactory;
import org.elasticsearch.index.analysis.KeywordTokenizerFactory;
import org.elasticsearch.index.analysis.MultiTermAwareComponent;
import org.elasticsearch.index.analysis.PreConfiguredCharFilter;
import org.elasticsearch.index.analysis.PreConfiguredTokenFilter;
@ -79,7 +78,7 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase {
// exposed in ES
.put("classic", MovedToAnalysisCommon.class)
.put("edgengram", MovedToAnalysisCommon.class)
.put("keyword", KeywordTokenizerFactory.class)
.put("keyword", MovedToAnalysisCommon.class)
.put("letter", MovedToAnalysisCommon.class)
.put("lowercase", MovedToAnalysisCommon.class)
.put("ngram", MovedToAnalysisCommon.class)

View File

@ -0,0 +1,54 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test;
import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Tokenizer;
import org.elasticsearch.index.analysis.TokenizerFactory;
import org.elasticsearch.indices.analysis.AnalysisModule;
import org.elasticsearch.plugins.AnalysisPlugin;
import org.elasticsearch.plugins.Plugin;
import java.util.Map;
import static java.util.Collections.singletonMap;
/**
* Some tests rely on the keyword tokenizer, but this tokenizer isn't part of lucene-core and is therefore not
* available in some modules. This test plugin registers the mock tokenizer and advertises it as the keyword tokenizer.
*
* Most tests that need this test plugin use normalizers. When normalizers are constructed they try to resolve the
* keyword tokenizer, but if the keyword tokenizer isn't available then constructing normalizers will fail.
*/
public class MockKeywordPlugin extends Plugin implements AnalysisPlugin {
@Override
public Map<String, AnalysisModule.AnalysisProvider<TokenizerFactory>> getTokenizers() {
return singletonMap("keyword", (indexSettings, environment, name, settings) -> {
class Factory implements TokenizerFactory {
@Override
public Tokenizer create() {
return new MockTokenizer(MockTokenizer.KEYWORD, false);
}
}
return new Factory();
});
}
}
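A usage sketch (the test class name is hypothetical; the nodePlugins override mirrors the ones added to the integration tests earlier in this diff):

import java.util.Collection;
import java.util.Collections;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.MockKeywordPlugin;

public class MyNormalizerIT extends ESIntegTestCase { // hypothetical name
    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // Makes the "keyword" tokenizer resolvable so normalizers can be constructed.
        return Collections.singleton(MockKeywordPlugin.class);
    }
}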

View File

@ -88,6 +88,7 @@ compileTestJava.options.compilerArgs << "-Xlint:-deprecation,-rawtypes,-serial,-
licenseHeaders {
approvedLicenses << 'BCrypt (BSD-like)'
additionalLicense 'BCRYP', 'BCrypt (BSD-like)', 'Copyright (c) 2006 Damien Miller <djm@mindrot.org>'
excludes << 'org/elasticsearch/xpack/core/ssl/DerParser.java'
}
// make LicenseSigner available for testing signed licenses

View File

@ -7,7 +7,6 @@ package org.elasticsearch.xpack.core;
import org.apache.logging.log4j.Logger;
import org.apache.lucene.util.SetOnce;
import org.bouncycastle.operator.OperatorCreationException;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
@ -125,7 +124,7 @@ public class XPackPlugin extends XPackClientPlugin implements ScriptPlugin, Exte
public XPackPlugin(
final Settings settings,
final Path configPath) throws IOException, DestroyFailedException, OperatorCreationException, GeneralSecurityException {
final Path configPath) {
super(settings);
this.settings = settings;
this.transportClientMode = transportClientMode(settings);

View File

@ -0,0 +1,308 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.core.ssl;
import org.bouncycastle.asn1.ASN1Encodable;
import org.bouncycastle.asn1.ASN1ObjectIdentifier;
import org.bouncycastle.asn1.DERSequence;
import org.bouncycastle.asn1.DERTaggedObject;
import org.bouncycastle.asn1.DERUTF8String;
import org.bouncycastle.asn1.pkcs.PKCSObjectIdentifiers;
import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.AuthorityKeyIdentifier;
import org.bouncycastle.asn1.x509.BasicConstraints;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.ExtensionsGenerator;
import org.bouncycastle.asn1.x509.GeneralName;
import org.bouncycastle.asn1.x509.GeneralNames;
import org.bouncycastle.asn1.x509.Time;
import org.bouncycastle.cert.CertIOException;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.cert.jcajce.JcaX509ExtensionUtils;
import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.bouncycastle.pkcs.PKCS10CertificationRequest;
import org.bouncycastle.pkcs.jcajce.JcaPKCS10CertificationRequestBuilder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.network.InetAddressHelper;
import org.elasticsearch.common.network.NetworkAddress;
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import javax.net.ssl.X509ExtendedKeyManager;
import javax.net.ssl.X509ExtendedTrustManager;
import javax.security.auth.x500.X500Principal;
import java.io.IOException;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.SocketException;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.HashSet;
import java.util.Locale;
import java.util.Objects;
import java.util.Set;
/**
* Utility methods that deal with {@link Certificate}, {@link KeyStore}, {@link X509ExtendedTrustManager}, {@link X509ExtendedKeyManager}
* and other certificate related objects.
*/
public class CertGenUtils {
private static final String CN_OID = "2.5.4.3";
private static final int SERIAL_BIT_LENGTH = 20 * 8;
private static final BouncyCastleProvider BC_PROV = new BouncyCastleProvider();
private CertGenUtils() {
}
/**
* Generates a CA certificate
*/
public static X509Certificate generateCACertificate(X500Principal x500Principal, KeyPair keyPair, int days)
throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException {
return generateSignedCertificate(x500Principal, null, keyPair, null, null, true, days, null);
}
/**
* Generates a signed certificate using the provided CA private key and
* information from the CA certificate
*
* @param principal the principal of the certificate; commonly referred to as the
* distinguished name (DN)
* @param subjectAltNames the subject alternative names that should be added to the
* certificate as an X509v3 extension. May be {@code null}
* @param keyPair the key pair that will be associated with the certificate
* @param caCert the CA certificate. If {@code null}, this results in a self signed
* certificate
* @param caPrivKey the CA private key. If {@code null}, this results in a self signed
* certificate
* @param days the number of days the certificate will be valid, starting now
* @return a signed {@link X509Certificate}
*/
public static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair,
X509Certificate caCert, PrivateKey caPrivKey, int days)
throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException {
return generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false, days, null);
}
/**
* Generates a signed certificate using the provided CA private key and
* information from the CA certificate
*
* @param principal the principal of the certificate; commonly referred to as the
* distinguished name (DN)
* @param subjectAltNames the subject alternative names that should be added to the
* certificate as an X509v3 extension. May be {@code null}
* @param keyPair the key pair that will be associated with the certificate
* @param caCert the CA certificate. If {@code null}, this results in a self signed
* certificate
* @param caPrivKey the CA private key. If {@code null}, this results in a self signed
* certificate
* @param days the number of days the certificate will be valid, starting now
* @param signatureAlgorithm algorithm used for signing certificate. If {@code null} or
* empty, then use default algorithm {@link CertGenUtils#getDefaultSignatureAlgorithm(PrivateKey)}
* @return a signed {@link X509Certificate}
*/
public static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair,
X509Certificate caCert, PrivateKey caPrivKey,
int days, String signatureAlgorithm)
throws OperatorCreationException, CertificateException, CertIOException, NoSuchAlgorithmException {
return generateSignedCertificate(principal, subjectAltNames, keyPair, caCert, caPrivKey, false, days, signatureAlgorithm);
}
/**
* Generates a signed certificate
*
* @param principal the principal of the certificate; commonly referred to as the
* distinguished name (DN)
* @param subjectAltNames the subject alternative names that should be added to the
* certificate as an X509v3 extension. May be {@code null}
* @param keyPair the key pair that will be associated with the certificate
* @param caCert the CA certificate. If {@code null}, this results in a self signed
* certificate
* @param caPrivKey the CA private key. If {@code null}, this results in a self signed
* certificate
* @param isCa whether or not the generated certificate is a CA
* @param days the number of days the certificate will be valid, starting now
* @param signatureAlgorithm algorithm used for signing certificate. If {@code null} or
* empty, then use default algorithm {@link CertGenUtils#getDefaultSignatureAlgorithm(PrivateKey)}
* @return a signed {@link X509Certificate}
*/
private static X509Certificate generateSignedCertificate(X500Principal principal, GeneralNames subjectAltNames, KeyPair keyPair,
X509Certificate caCert, PrivateKey caPrivKey, boolean isCa,
int days, String signatureAlgorithm)
throws NoSuchAlgorithmException, CertificateException, CertIOException, OperatorCreationException {
Objects.requireNonNull(keyPair, "Key-Pair must not be null");
final DateTime notBefore = new DateTime(DateTimeZone.UTC);
if (days < 1) {
throw new IllegalArgumentException("the certificate must be valid for at least one day");
}
final DateTime notAfter = notBefore.plusDays(days);
final BigInteger serial = CertGenUtils.getSerial();
JcaX509ExtensionUtils extUtils = new JcaX509ExtensionUtils();
X500Name subject = X500Name.getInstance(principal.getEncoded());
final X500Name issuer;
final AuthorityKeyIdentifier authorityKeyIdentifier;
if (caCert != null) {
if (caCert.getBasicConstraints() < 0) {
throw new IllegalArgumentException("ca certificate is not a CA!");
}
issuer = X500Name.getInstance(caCert.getIssuerX500Principal().getEncoded());
authorityKeyIdentifier = extUtils.createAuthorityKeyIdentifier(caCert.getPublicKey());
} else {
issuer = subject;
authorityKeyIdentifier = extUtils.createAuthorityKeyIdentifier(keyPair.getPublic());
}
JcaX509v3CertificateBuilder builder =
new JcaX509v3CertificateBuilder(issuer, serial,
new Time(notBefore.toDate(), Locale.ROOT), new Time(notAfter.toDate(), Locale.ROOT), subject, keyPair.getPublic());
builder.addExtension(Extension.subjectKeyIdentifier, false, extUtils.createSubjectKeyIdentifier(keyPair.getPublic()));
builder.addExtension(Extension.authorityKeyIdentifier, false, authorityKeyIdentifier);
if (subjectAltNames != null) {
builder.addExtension(Extension.subjectAlternativeName, false, subjectAltNames);
}
builder.addExtension(Extension.basicConstraints, isCa, new BasicConstraints(isCa));
PrivateKey signingKey = caPrivKey != null ? caPrivKey : keyPair.getPrivate();
ContentSigner signer = new JcaContentSignerBuilder(
(Strings.isNullOrEmpty(signatureAlgorithm)) ? getDefaultSignatureAlgorithm(signingKey) : signatureAlgorithm)
.setProvider(CertGenUtils.BC_PROV).build(signingKey);
X509CertificateHolder certificateHolder = builder.build(signer);
return new JcaX509CertificateConverter().getCertificate(certificateHolder);
}
/**
* Determines the default signing algorithm used by CertGenUtils, based on the
* private key's algorithm ({@link PrivateKey#getAlgorithm()})
*
* @param key {@link PrivateKey}
* @return algorithm
*/
private static String getDefaultSignatureAlgorithm(PrivateKey key) {
String signatureAlgorithm = null;
switch (key.getAlgorithm()) {
case "RSA":
signatureAlgorithm = "SHA256withRSA";
break;
case "DSA":
signatureAlgorithm = "SHA256withDSA";
break;
case "EC":
signatureAlgorithm = "SHA256withECDSA";
break;
default:
throw new IllegalArgumentException("Unsupported algorithm : " + key.getAlgorithm()
+ " for signature, allowed values for private key algorithm are [RSA, DSA, EC]");
}
return signatureAlgorithm;
}
/**
* Generates a certificate signing request
*
* @param keyPair the key pair that will be associated by the certificate generated from the certificate signing request
* @param principal the principal of the certificate; commonly referred to as the distinguished name (DN)
* @param sanList the subject alternative names that should be added to the certificate as an X509v3 extension. May be
* {@code null}
* @return a certificate signing request
*/
static PKCS10CertificationRequest generateCSR(KeyPair keyPair, X500Principal principal, GeneralNames sanList)
throws IOException, OperatorCreationException {
Objects.requireNonNull(keyPair, "Key-Pair must not be null");
Objects.requireNonNull(keyPair.getPublic(), "Public-Key must not be null");
Objects.requireNonNull(principal, "Principal must not be null");
JcaPKCS10CertificationRequestBuilder builder = new JcaPKCS10CertificationRequestBuilder(principal, keyPair.getPublic());
if (sanList != null) {
ExtensionsGenerator extGen = new ExtensionsGenerator();
extGen.addExtension(Extension.subjectAlternativeName, false, sanList);
builder.addAttribute(PKCSObjectIdentifiers.pkcs_9_at_extensionRequest, extGen.generate());
}
return builder.build(new JcaContentSignerBuilder("SHA256withRSA").setProvider(CertGenUtils.BC_PROV).build(keyPair.getPrivate()));
}
/**
* Gets a random serial for a certificate that is generated from a {@link SecureRandom}
*/
public static BigInteger getSerial() {
SecureRandom random = new SecureRandom();
BigInteger serial = new BigInteger(SERIAL_BIT_LENGTH, random);
assert serial.compareTo(BigInteger.valueOf(0L)) >= 0;
return serial;
}
/**
* Generates an RSA key pair with the provided key size (in bits)
*/
public static KeyPair generateKeyPair(int keysize) throws NoSuchAlgorithmException {
// generate a private key
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("RSA");
keyPairGenerator.initialize(keysize);
return keyPairGenerator.generateKeyPair();
}
/**
* Converts the {@link InetAddress} objects into a {@link GeneralNames} object that is used to represent subject alternative names.
*/
public static GeneralNames getSubjectAlternativeNames(boolean resolveName, Set<InetAddress> addresses) throws SocketException {
Set<GeneralName> generalNameList = new HashSet<>();
for (InetAddress address : addresses) {
if (address.isAnyLocalAddress()) {
// it is a wildcard address
for (InetAddress inetAddress : InetAddressHelper.getAllAddresses()) {
addSubjectAlternativeNames(resolveName, inetAddress, generalNameList);
}
} else {
addSubjectAlternativeNames(resolveName, address, generalNameList);
}
}
return new GeneralNames(generalNameList.toArray(new GeneralName[generalNameList.size()]));
}
@SuppressForbidden(reason = "need to use getHostName to resolve DNS name and getHostAddress to ensure we resolved the name")
private static void addSubjectAlternativeNames(boolean resolveName, InetAddress inetAddress, Set<GeneralName> list) {
String hostaddress = inetAddress.getHostAddress();
String ip = NetworkAddress.format(inetAddress);
list.add(new GeneralName(GeneralName.iPAddress, ip));
if (resolveName && (inetAddress.isLinkLocalAddress() == false)) {
String possibleHostName = inetAddress.getHostName();
if (possibleHostName.equals(hostaddress) == false) {
list.add(new GeneralName(GeneralName.dNSName, possibleHostName));
}
}
}
/**
* Creates an X.509 {@link GeneralName} for use as a <em>Common Name</em> in the certificate's <em>Subject Alternative Names</em>
* extension. A <em>common name</em> is a name with a tag of {@link GeneralName#otherName OTHER}, with an object-id that references
* the {@link #CN_OID cn} attribute, an explicit tag of '0', and a DER encoded UTF8 string for the name.
* Using the {@code cn} OID as a <em>Subject Alternative Name</em> in this way is <strong>non-standard</strong> and will not be
* recognised by other X.509/TLS implementations.
*/
public static GeneralName createCommonName(String cn) {
final ASN1Encodable[] sequence = {new ASN1ObjectIdentifier(CN_OID), new DERTaggedObject(true, 0, new DERUTF8String(cn))};
return new GeneralName(GeneralName.otherName, new DERSequence(sequence));
}
}
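For reference, a minimal usage sketch of the public helpers above (the principal names, the 2048-bit key size, and the validity periods are illustrative assumptions, not values from this commit):

import java.security.KeyPair;
import java.security.cert.X509Certificate;
import javax.security.auth.x500.X500Principal;
import org.bouncycastle.asn1.x509.GeneralNames;

class CertGenUtilsSketch {
    static X509Certificate newNodeCert() throws Exception {
        // Self-signed CA certificate, valid for one year.
        KeyPair caKeys = CertGenUtils.generateKeyPair(2048);
        X509Certificate caCert = CertGenUtils.generateCACertificate(
                new X500Principal("CN=test-ca"), caKeys, 365);
        // Non-standard "common name" SAN entry, as documented on createCommonName().
        GeneralNames san = new GeneralNames(CertGenUtils.createCommonName("node-1"));
        // Node certificate signed by the CA above, valid for 30 days.
        KeyPair nodeKeys = CertGenUtils.generateKeyPair(2048);
        return CertGenUtils.generateSignedCertificate(
                new X500Principal("CN=node-1"), san, nodeKeys, caCert, caKeys.getPrivate(), 30);
    }
}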

Some files were not shown because too many files have changed in this diff.