Mirror of https://github.com/honeymoose/OpenSearch.git, synced 2025-02-17 02:14:54 +00:00
Merge branch 'master' into ccr
* master:
  Add get stored script and delete stored script to high level REST API - post backport fix
  Add get stored script and delete stored script to high level REST API (#31355)
  Core: Combine Action and GenericAction (#31405)
  Fix reference to XContentBuilder.string() (#31337)
  Avoid sending duplicate remote failed shard requests (#31313)
  Fix defaults in GeoShapeFieldMapper output (#31302)
  RestAPI: Reject forcemerge requests with a body (#30792)
  Packaging: Remove windows bin files from the tar distribution (#30596)
  Docs: Use the default distribution to test docs (#31251)
  [DOCS] Adds testing for security APIs (#31345)
  Clarify that IP range data can be specified in CIDR notation. (#31374)
  Use system context for cluster state update tasks (#31241)
  Percentile/Ranks should return null instead of NaN when empty (#30460)
  REST high-level client: add validate query API (#31077)
  Move language analyzers from server to analysis-common module. (#31300)
  [Test] Fix :example-plugins:rest-handler on Windows
  Expose lucene's RemoveDuplicatesTokenFilter (#31275)
  Reload secure settings for plugins (#31383)
  Remove some cases in FieldTypeLookupTests that are no longer relevant. (#31381)
  Ensure we don't use a remote profile if cluster name matches (#31331)
  [TEST] Double write alias fault (#30942)
  [DOCS] Fix version in SQL JDBC Maven template
  [DOCS] Improve install and setup section for SQL JDBC
  SQL: Fix rest endpoint names in node stats (#31371)
  Support for remote path in reindex api - post backport fix Closes #22913
  [ML] Put ML filter API response should contain the filter (#31362)
  Support for remote path in reindex api (#31290)
  Add byte array pooling to nio http transport (#31349)
  Remove trial status info from start trial doc (#31365)
  [DOCS] Adds links to release notes and highlights
  add is-write-index flag to aliases (#30942)
  Add rollover-creation-date setting to rolled over index (#31144)
  [ML] Hold ML filter items in sorted set (#31338)
  [Tests] Fix edge case in ScriptedMetricAggregatorTests (#31357)
This commit is contained in:
commit ec04366d29
@@ -32,6 +32,8 @@ public class DocsTestPlugin extends RestTestPlugin {
    public void apply(Project project) {
        project.pluginManager.apply('elasticsearch.standalone-rest-test')
        super.apply(project)
        // The distribution can be configured with -Dtests.distribution on the command line
        project.integTestCluster.distribution = System.getProperty('tests.distribution', 'zip')
        // Docs are published separately so no need to assemble
        project.tasks.remove(project.assemble)
        project.build.dependsOn.remove('assemble')
@@ -43,6 +45,8 @@ public class DocsTestPlugin extends RestTestPlugin {
            '\\{version\\}':
                VersionProperties.elasticsearch.toString().replace('-SNAPSHOT', ''),
            '\\{lucene_version\\}' : VersionProperties.lucene.replaceAll('-snapshot-\\w+$', ''),
            '\\{build_flavor\\}' :
                project.integTestCluster.distribution.startsWith('oss-') ? 'oss' : 'default',
        ]
        Task listSnippets = project.tasks.create('listSnippets', SnippetsTask)
        listSnippets.group 'Docs'
@@ -19,10 +19,9 @@
package org.elasticsearch.plugin.noop.action.bulk;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;

-public class NoopBulkAction extends Action<BulkRequest, BulkResponse> {
+public class NoopBulkAction extends Action<BulkResponse> {
    public static final String NAME = "mock:data/write/bulk";

    public static final NoopBulkAction INSTANCE = new NoopBulkAction();
@@ -19,10 +19,9 @@
package org.elasticsearch.plugin.noop.action.search;

import org.elasticsearch.action.Action;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;

-public class NoopSearchAction extends Action<SearchRequest, SearchResponse> {
+public class NoopSearchAction extends Action<SearchResponse> {
    public static final NoopSearchAction INSTANCE = new NoopSearchAction();
    public static final String NAME = "mock:data/read/search";
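The two hunks above reflect "Core: Combine Action and GenericAction (#31405)": Action is now parameterized only by its response type. A minimal sketch of the resulting pattern (the class name and action name are made up; assumes the 6.x Action base class with a newResponse() factory method):

import org.elasticsearch.action.Action;
import org.elasticsearch.action.search.SearchResponse;

public class MyNoopAction extends Action<SearchResponse> {
    public static final MyNoopAction INSTANCE = new MyNoopAction();
    public static final String NAME = "mock:data/read/my-noop"; // hypothetical action name

    private MyNoopAction() {
        super(NAME); // the request type no longer appears in the class signature
    }

    @Override
    public SearchResponse newResponse() {
        return new SearchResponse();
    }
}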
@@ -57,6 +57,8 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequ
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
@@ -661,6 +663,36 @@ public final class IndicesClient {
            PutIndexTemplateResponse::fromXContent, listener, emptySet());
    }

    /**
     * Validate a potentially expensive query without executing it.
     * <p>
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-validate.html"> Validate Query API
     * on elastic.co</a>
     * @param validateQueryRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public ValidateQueryResponse validateQuery(ValidateQueryRequest validateQueryRequest, RequestOptions options) throws IOException {
        return restHighLevelClient.performRequestAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options,
            ValidateQueryResponse::fromXContent, emptySet());
    }

    /**
     * Asynchronously validate a potentially expensive query without executing it.
     * <p>
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-validate.html"> Validate Query API
     * on elastic.co</a>
     * @param validateQueryRequest the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void validateQueryAsync(ValidateQueryRequest validateQueryRequest, RequestOptions options,
                                   ActionListener<ValidateQueryResponse> listener) {
        restHighLevelClient.performRequestAsyncAndParseEntity(validateQueryRequest, RequestConverters::validateQuery, options,
            ValidateQueryResponse::fromXContent, listener, emptySet());
    }

    /**
     * Gets index templates using the Index Templates API
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/indices-templates.html"> Index Templates API
@@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRe
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
@@ -58,6 +60,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest;
@@ -856,6 +859,20 @@ final class RequestConverters {
        return request;
    }

    static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws IOException {
        String[] indices = validateQueryRequest.indices() == null ? Strings.EMPTY_ARRAY : validateQueryRequest.indices();
        String[] types = validateQueryRequest.types() == null || indices.length <= 0 ? Strings.EMPTY_ARRAY : validateQueryRequest.types();
        String endpoint = endpoint(indices, types, "_validate/query");
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        Params params = new Params(request);
        params.withIndicesOptions(validateQueryRequest.indicesOptions());
        params.putParam("explain", Boolean.toString(validateQueryRequest.explain()));
        params.putParam("all_shards", Boolean.toString(validateQueryRequest.allShards()));
        params.putParam("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
        request.setEntity(createEntity(validateQueryRequest, REQUEST_BODY_CONTENT_TYPE));
        return request;
    }

    static Request getAlias(GetAliasesRequest getAliasesRequest) {
        String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices();
        String[] aliases = getAliasesRequest.aliases() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.aliases();
@@ -877,6 +894,23 @@ final class RequestConverters {
        return request;
    }

    static Request getScript(GetStoredScriptRequest getStoredScriptRequest) {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(getStoredScriptRequest.id()).build();
        Request request = new Request(HttpGet.METHOD_NAME, endpoint);
        Params params = new Params(request);
        params.withMasterTimeout(getStoredScriptRequest.masterNodeTimeout());
        return request;
    }

    static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) {
        String endpoint = new EndpointBuilder().addPathPartAsIs("_scripts").addPathPart(deleteStoredScriptRequest.id()).build();
        Request request = new Request(HttpDelete.METHOD_NAME, endpoint);
        Params params = new Params(request);
        params.withTimeout(deleteStoredScriptRequest.timeout());
        params.withMasterTimeout(deleteStoredScriptRequest.masterNodeTimeout());
        return request;
    }

    private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException {
        BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef();
        return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
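For orientation, a rough sketch of the HTTP requests these converters build. RequestConverters is package-private, so this only compiles inside org.elasticsearch.client; the index, type, and script names are made up, and checked IOExceptions are ignored:

// Roughly GET /a,b/t/_validate/query?explain=true&all_shards=false&rewrite=false with a JSON body
// (the indices options of the request add a few more query parameters)
ValidateQueryRequest validateRequest = new ValidateQueryRequest("a", "b");
validateRequest.types("t");
validateRequest.explain(true);
Request validateHttp = RequestConverters.validateQuery(validateRequest); // throws IOException

// Roughly GET /_scripts/calculate-score with no body
Request getScriptHttp = RequestConverters.getScript(new GetStoredScriptRequest("calculate-score"));

// Roughly DELETE /_scripts/calculate-score with no body
Request deleteScriptHttp = RequestConverters.deleteScript(new DeleteStoredScriptRequest("calculate-score"));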
@@ -26,6 +26,10 @@ import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.delete.DeleteRequest;
@@ -652,6 +656,62 @@ public class RestHighLevelClient implements Closeable {
            FieldCapabilitiesResponse::fromXContent, emptySet());
    }

    /**
     * Get stored script by id.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html">
     * How to use scripts on elastic.co</a>
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public GetStoredScriptResponse getScript(GetStoredScriptRequest request, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(request, RequestConverters::getScript, options,
            GetStoredScriptResponse::fromXContent, emptySet());
    }

    /**
     * Asynchronously get stored script by id.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html">
     * How to use scripts on elastic.co</a>
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void getScriptAsync(GetStoredScriptRequest request, RequestOptions options,
                               ActionListener<GetStoredScriptResponse> listener) {
        performRequestAsyncAndParseEntity(request, RequestConverters::getScript, options,
            GetStoredScriptResponse::fromXContent, listener, emptySet());
    }

    /**
     * Delete stored script by id.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html">
     * How to use scripts on elastic.co</a>
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @return the response
     * @throws IOException in case there is a problem sending the request or parsing back the response
     */
    public DeleteStoredScriptResponse deleteScript(DeleteStoredScriptRequest request, RequestOptions options) throws IOException {
        return performRequestAndParseEntity(request, RequestConverters::deleteScript, options,
            DeleteStoredScriptResponse::fromXContent, emptySet());
    }

    /**
     * Asynchronously delete stored script by id.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-scripting-using.html">
     * How to use scripts on elastic.co</a>
     * @param request the request
     * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized
     * @param listener the listener to be notified upon request completion
     */
    public void deleteScriptAsync(DeleteStoredScriptRequest request, RequestOptions options,
                                  ActionListener<DeleteStoredScriptResponse> listener) {
        performRequestAsyncAndParseEntity(request, RequestConverters::deleteScript, options,
            DeleteStoredScriptResponse::fromXContent, listener, emptySet());
    }

    /**
     * Asynchronously executes a request using the Field Capabilities API.
     * See <a href="https://www.elastic.co/guide/en/elasticsearch/reference/current/search-field-caps.html">Field Capabilities API
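A minimal usage sketch for the two new stored-script methods (assumes client is an already-constructed RestHighLevelClient and a script with the made-up id "calculate-score" exists):

GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score");
GetStoredScriptResponse getResponse = client.getScript(getRequest, RequestOptions.DEFAULT);
StoredScriptSource source = getResponse.getSource(); // lang, source and options of the stored script

DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest("calculate-score");
DeleteStoredScriptResponse deleteResponse = client.deleteScript(deleteRequest, RequestOptions.DEFAULT);
boolean acknowledged = deleteResponse.isAcknowledged();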
@@ -20,6 +20,7 @@
package org.elasticsearch.client;

import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchStatusException;
@@ -63,6 +64,8 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequ
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.WriteRequest;
@@ -80,6 +83,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;

import java.io.IOException;
@@ -1155,6 +1160,40 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase {
        assertThat(unknownSettingError.getDetailedMessage(), containsString("unknown setting [index.this-setting-does-not-exist]"));
    }

    public void testValidateQuery() throws IOException {
        String index = "some_index";
        createIndex(index, Settings.EMPTY);
        QueryBuilder builder = QueryBuilders
            .boolQuery()
            .must(QueryBuilders.queryStringQuery("*:*"))
            .filter(QueryBuilders.termQuery("user", "kimchy"));
        ValidateQueryRequest request = new ValidateQueryRequest(index).query(builder);
        request.explain(randomBoolean());
        ValidateQueryResponse response = execute(request, highLevelClient().indices()::validateQuery,
            highLevelClient().indices()::validateQueryAsync);
        assertTrue(response.isValid());
    }

    public void testInvalidValidateQuery() throws IOException {
        String index = "shakespeare";

        createIndex(index, Settings.EMPTY);
        Request postDoc = new Request(HttpPost.METHOD_NAME, "/" + index + "/1");
        postDoc.setJsonEntity(
            "{\"type\":\"act\",\"line_id\":1,\"play_name\":\"Henry IV\", \"speech_number\":\"\"," +
            "\"line_number\":\"\",\"speaker\":\"\",\"text_entry\":\"ACT I\"}");
        assertOK(client().performRequest(postDoc));

        QueryBuilder builder = QueryBuilders
            .queryStringQuery("line_id:foo")
            .lenient(false);
        ValidateQueryRequest request = new ValidateQueryRequest(index).query(builder);
        request.explain(true);
        ValidateQueryResponse response = execute(request, highLevelClient().indices()::validateQuery,
            highLevelClient().indices()::validateQueryAsync);
        assertFalse(response.isValid());
    }

    public void testGetIndexTemplate() throws Exception {
        RestHighLevelClient client = highLevelClient();

@@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesRe
import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest;
import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest;
import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions;
@@ -60,6 +62,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeRequest;
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkShardRequest;
import org.elasticsearch.action.delete.DeleteRequest;
@@ -1895,6 +1898,40 @@ public class RequestConvertersTests extends ESTestCase {
        assertToXContentBody(putTemplateRequest, request.getEntity());
    }

    public void testValidateQuery() throws Exception {
        String[] indices = randomBoolean() ? null : randomIndicesNames(0, 5);
        String[] types = randomBoolean() ? generateRandomStringArray(5, 5, false, false) : null;
        ValidateQueryRequest validateQueryRequest;
        if (randomBoolean()) {
            validateQueryRequest = new ValidateQueryRequest(indices);
        } else {
            validateQueryRequest = new ValidateQueryRequest();
            validateQueryRequest.indices(indices);
        }
        validateQueryRequest.types(types);
        Map<String, String> expectedParams = new HashMap<>();
        setRandomIndicesOptions(validateQueryRequest::indicesOptions, validateQueryRequest::indicesOptions, expectedParams);
        validateQueryRequest.explain(randomBoolean());
        validateQueryRequest.rewrite(randomBoolean());
        validateQueryRequest.allShards(randomBoolean());
        expectedParams.put("explain", Boolean.toString(validateQueryRequest.explain()));
        expectedParams.put("rewrite", Boolean.toString(validateQueryRequest.rewrite()));
        expectedParams.put("all_shards", Boolean.toString(validateQueryRequest.allShards()));
        Request request = RequestConverters.validateQuery(validateQueryRequest);
        StringJoiner endpoint = new StringJoiner("/", "/", "");
        if (indices != null && indices.length > 0) {
            endpoint.add(String.join(",", indices));
            if (types != null && types.length > 0) {
                endpoint.add(String.join(",", types));
            }
        }
        endpoint.add("_validate/query");
        assertThat(request.getEndpoint(), equalTo(endpoint.toString()));
        assertThat(request.getParameters(), equalTo(expectedParams));
        assertToXContentBody(validateQueryRequest, request.getEntity());
        assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
    }

    public void testGetTemplateRequest() throws Exception {
        Map<String, String> encodes = new HashMap<>();
        encodes.put("log", "log");
@@ -1913,6 +1950,32 @@ public class RequestConvertersTests extends ESTestCase {
        assertThat(request.getEntity(), nullValue());
    }

    public void testGetScriptRequest() {
        GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script");
        Map<String, String> expectedParams = new HashMap<>();
        setRandomMasterTimeout(getStoredScriptRequest, expectedParams);

        Request request = RequestConverters.getScript(getStoredScriptRequest);
        assertThat(request.getEndpoint(), equalTo("/_scripts/" + getStoredScriptRequest.id()));
        assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME));
        assertThat(request.getParameters(), equalTo(expectedParams));
        assertThat(request.getEntity(), nullValue());
    }

    public void testDeleteScriptRequest() {
        DeleteStoredScriptRequest deleteStoredScriptRequest = new DeleteStoredScriptRequest("x-script");

        Map<String, String> expectedParams = new HashMap<>();
        setRandomTimeout(deleteStoredScriptRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams);
        setRandomMasterTimeout(deleteStoredScriptRequest, expectedParams);

        Request request = RequestConverters.deleteScript(deleteStoredScriptRequest);
        assertThat(request.getEndpoint(), equalTo("/_scripts/" + deleteStoredScriptRequest.id()));
        assertThat(request.getMethod(), equalTo(HttpDelete.METHOD_NAME));
        assertThat(request.getParameters(), equalTo(expectedParams));
        assertThat(request.getEntity(), nullValue());
    }

    private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException {
        BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, REQUEST_BODY_CONTENT_TYPE, false);
        assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue());
@@ -0,0 +1,105 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client;

import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.StoredScriptSource;

import java.util.Collections;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;

public class StoredScriptsIT extends ESRestHighLevelClientTestCase {

    final String id = "calculate-score";

    public void testGetStoredScript() throws Exception {
        final StoredScriptSource scriptSource =
            new StoredScriptSource("painless",
                "Math.log(_score * 2) + params.my_modifier",
                Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()));

        final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
        // TODO: change to the high-level PutStoredScriptRequest when it is ready;
        // for now, use the low-level REST API
        Response putResponse =
            adminClient()
                .performRequest("PUT", "/_scripts/calculate-score", emptyMap(),
                    new StringEntity("{\"script\":" + script + "}",
                        ContentType.APPLICATION_JSON));
        assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
        assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));

        GetStoredScriptRequest getRequest = new GetStoredScriptRequest("calculate-score");
        getRequest.masterNodeTimeout("50s");

        GetStoredScriptResponse getResponse = execute(getRequest, highLevelClient()::getScript,
            highLevelClient()::getScriptAsync);

        assertThat(getResponse.getSource(), equalTo(scriptSource));
    }

    public void testDeleteStoredScript() throws Exception {
        final StoredScriptSource scriptSource =
            new StoredScriptSource("painless",
                "Math.log(_score * 2) + params.my_modifier",
                Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()));

        final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
        // TODO: change to the high-level PutStoredScriptRequest when it is ready;
        // for now, use the low-level REST API
        Response putResponse =
            adminClient()
                .performRequest("PUT", "/_scripts/" + id, emptyMap(),
                    new StringEntity("{\"script\":" + script + "}",
                        ContentType.APPLICATION_JSON));
        assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
        assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));

        DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest(id);
        deleteRequest.masterNodeTimeout("50s");
        deleteRequest.timeout("50s");

        DeleteStoredScriptResponse deleteResponse = execute(deleteRequest, highLevelClient()::deleteScript,
            highLevelClient()::deleteScriptAsync);

        assertThat(deleteResponse.isAcknowledged(), equalTo(true));

        GetStoredScriptRequest getRequest = new GetStoredScriptRequest(id);

        final ElasticsearchStatusException statusException = expectThrows(ElasticsearchStatusException.class,
            () -> execute(getRequest, highLevelClient()::getScript,
                highLevelClient()::getScriptAsync));
        assertThat(statusException.status(), equalTo(RestStatus.NOT_FOUND));
    }
}
@@ -62,6 +62,9 @@ import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesRequ
import org.elasticsearch.action.admin.indices.template.get.GetIndexTemplatesResponse;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.template.put.PutIndexTemplateResponse;
import org.elasticsearch.action.admin.indices.validate.query.QueryExplanation;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.IndicesOptions;
@@ -81,6 +84,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.rest.RestStatus;

@@ -2128,4 +2132,83 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    public void testValidateQuery() throws IOException, InterruptedException {
        RestHighLevelClient client = highLevelClient();

        String index = "some_index";
        createIndex(index, Settings.EMPTY);

        // tag::validate-query-request
        ValidateQueryRequest request = new ValidateQueryRequest(index); // <1>
        // end::validate-query-request

        // tag::validate-query-request-query
        QueryBuilder builder = QueryBuilders
            .boolQuery() // <1>
            .must(QueryBuilders.queryStringQuery("*:*"))
            .filter(QueryBuilders.termQuery("user", "kimchy"));
        request.query(builder); // <2>
        // end::validate-query-request-query

        // tag::validate-query-request-explain
        request.explain(true); // <1>
        // end::validate-query-request-explain

        // tag::validate-query-request-allShards
        request.allShards(true); // <1>
        // end::validate-query-request-allShards

        // tag::validate-query-request-rewrite
        request.rewrite(true); // <1>
        // end::validate-query-request-rewrite

        // tag::validate-query-execute
        ValidateQueryResponse response = client.indices().validateQuery(request, RequestOptions.DEFAULT); // <1>
        // end::validate-query-execute

        // tag::validate-query-response
        boolean isValid = response.isValid(); // <1>
        int totalShards = response.getTotalShards(); // <2>
        int successfulShards = response.getSuccessfulShards(); // <3>
        int failedShards = response.getFailedShards(); // <4>
        if (failedShards > 0) {
            for (DefaultShardOperationFailedException failure : response.getShardFailures()) { // <5>
                String failedIndex = failure.index(); // <6>
                int shardId = failure.shardId(); // <7>
                String reason = failure.reason(); // <8>
            }
        }
        for (QueryExplanation explanation : response.getQueryExplanation()) { // <9>
            String explanationIndex = explanation.getIndex(); // <10>
            int shardId = explanation.getShard(); // <11>
            String explanationString = explanation.getExplanation(); // <12>
        }
        // end::validate-query-response

        // tag::validate-query-execute-listener
        ActionListener<ValidateQueryResponse> listener =
            new ActionListener<ValidateQueryResponse>() {
                @Override
                public void onResponse(ValidateQueryResponse validateQueryResponse) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::validate-query-execute-listener

        // Replace the empty listener with a blocking listener in tests
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::validate-query-execute-async
        client.indices().validateQueryAsync(request, RequestOptions.DEFAULT, listener); // <1>
        // end::validate-query-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }
}
@@ -0,0 +1,204 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.client.documentation;

import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.LatchedActionListener;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptResponse;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse;
import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.StoredScriptSource;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

import static java.util.Collections.emptyMap;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.hamcrest.Matchers.equalTo;

/**
 * This class is used to generate the Java Stored Scripts API documentation.
 * You need to wrap your code between two tags like:
 * // tag::example
 * // end::example
 *
 * Where example is your tag name.
 *
 * Then in the documentation, you can extract what is between tag and end tags with
 * ["source","java",subs="attributes,callouts,macros"]
 * --------------------------------------------------
 * include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[example]
 * --------------------------------------------------
 *
 * The column width of the code block is 84. If the code contains a line longer
 * than 84 characters, the line will be cut and a horizontal scroll bar will be displayed.
 * (The code indentation of the tag is not included in the width.)
 */
public class StoredScriptsDocumentationIT extends ESRestHighLevelClientTestCase {

    public void testGetStoredScript() throws Exception {
        RestHighLevelClient client = highLevelClient();

        final StoredScriptSource scriptSource =
            new StoredScriptSource("painless",
                "Math.log(_score * 2) + params.my_modifier",
                Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()));

        putStoredScript("calculate-score", scriptSource);

        {
            // tag::get-stored-script-request
            GetStoredScriptRequest request = new GetStoredScriptRequest("calculate-score"); // <1>
            // end::get-stored-script-request

            // tag::get-stored-script-request-masterTimeout
            request.masterNodeTimeout(TimeValue.timeValueSeconds(50)); // <1>
            request.masterNodeTimeout("50s"); // <2>
            // end::get-stored-script-request-masterTimeout

            // tag::get-stored-script-execute
            GetStoredScriptResponse getResponse = client.getScript(request, RequestOptions.DEFAULT);
            // end::get-stored-script-execute

            // tag::get-stored-script-response
            StoredScriptSource storedScriptSource = getResponse.getSource(); // <1>

            String lang = storedScriptSource.getLang(); // <2>
            String source = storedScriptSource.getSource(); // <3>
            Map<String, String> options = storedScriptSource.getOptions(); // <4>
            // end::get-stored-script-response

            assertThat(storedScriptSource, equalTo(scriptSource));

            // tag::get-stored-script-execute-listener
            ActionListener<GetStoredScriptResponse> listener =
                new ActionListener<GetStoredScriptResponse>() {
                    @Override
                    public void onResponse(GetStoredScriptResponse response) {
                        // <1>
                    }

                    @Override
                    public void onFailure(Exception e) {
                        // <2>
                    }
                };
            // end::get-stored-script-execute-listener

            // Replace the empty listener with a blocking listener in tests
            final CountDownLatch latch = new CountDownLatch(1);
            listener = new LatchedActionListener<>(listener, latch);

            // tag::get-stored-script-execute-async
            client.getScriptAsync(request, RequestOptions.DEFAULT, listener); // <1>
            // end::get-stored-script-execute-async

            assertTrue(latch.await(30L, TimeUnit.SECONDS));
        }

    }

    public void testDeleteStoredScript() throws Exception {
        RestHighLevelClient client = highLevelClient();

        final StoredScriptSource scriptSource =
            new StoredScriptSource("painless",
                "Math.log(_score * 2) + params.my_modifier",
                Collections.singletonMap(Script.CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()));

        putStoredScript("calculate-score", scriptSource);

        // tag::delete-stored-script-request
        DeleteStoredScriptRequest deleteRequest = new DeleteStoredScriptRequest("calculate-score"); // <1>
        // end::delete-stored-script-request

        // tag::delete-stored-script-request-masterTimeout
        deleteRequest.masterNodeTimeout(TimeValue.timeValueSeconds(50)); // <1>
        deleteRequest.masterNodeTimeout("50s"); // <2>
        // end::delete-stored-script-request-masterTimeout

        // tag::delete-stored-script-request-timeout
        deleteRequest.timeout(TimeValue.timeValueSeconds(60)); // <1>
        deleteRequest.timeout("60s"); // <2>
        // end::delete-stored-script-request-timeout

        // tag::delete-stored-script-execute
        DeleteStoredScriptResponse deleteResponse = client.deleteScript(deleteRequest, RequestOptions.DEFAULT);
        // end::delete-stored-script-execute

        // tag::delete-stored-script-response
        boolean acknowledged = deleteResponse.isAcknowledged(); // <1>
        // end::delete-stored-script-response

        putStoredScript("calculate-score", scriptSource);

        // tag::delete-stored-script-execute-listener
        ActionListener<DeleteStoredScriptResponse> listener =
            new ActionListener<DeleteStoredScriptResponse>() {
                @Override
                public void onResponse(DeleteStoredScriptResponse response) {
                    // <1>
                }

                @Override
                public void onFailure(Exception e) {
                    // <2>
                }
            };
        // end::delete-stored-script-execute-listener

        // Replace the empty listener with a blocking listener in tests
        final CountDownLatch latch = new CountDownLatch(1);
        listener = new LatchedActionListener<>(listener, latch);

        // tag::delete-stored-script-execute-async
        client.deleteScriptAsync(deleteRequest, RequestOptions.DEFAULT, listener); // <1>
        // end::delete-stored-script-execute-async

        assertTrue(latch.await(30L, TimeUnit.SECONDS));
    }

    private void putStoredScript(String id, StoredScriptSource scriptSource) throws IOException {
        final String script = Strings.toString(scriptSource.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS));
        // TODO: change to the high-level PutStoredScriptRequest when it is ready;
        // for now, use the low-level REST API
        Response putResponse =
            adminClient()
                .performRequest("PUT", "/_scripts/" + id, emptyMap(),
                    new StringEntity("{\"script\":" + script + "}",
                        ContentType.APPLICATION_JSON));
        assertEquals(putResponse.getStatusLine().getReasonPhrase(), 200, putResponse.getStatusLine().getStatusCode());
        assertEquals("{\"acknowledged\":true}", EntityUtils.toString(putResponse.getEntity()));
    }
}
@@ -18,7 +18,6 @@
 */

import org.apache.tools.ant.taskdefs.condition.Os
-import org.apache.tools.ant.filters.FixCrLfFilter
import org.elasticsearch.gradle.BuildPlugin
import org.elasticsearch.gradle.EmptyDirTask
import org.elasticsearch.gradle.LoggedExec
@@ -59,13 +58,6 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os
  }
  into('bin') {
    with binFiles(distributionType, oss)
-   with copySpec {
-     from('../src/bin') {
-       include '*.bat'
-       filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
-     }
-     MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss))
-   }
  }
  into('') {
    from {
@@ -88,9 +80,6 @@ CopySpec archiveFiles(CopySpec modulesFiles, String distributionType, boolean os
  }

  with noticeFile
- from('../src') {
-   include 'bin/*.exe'
- }
  into('modules') {
    with modulesFiles
  }
@@ -21,6 +21,7 @@ import org.elasticsearch.gradle.ConcatFilesTask
import org.elasticsearch.gradle.MavenFilteringHack
import org.elasticsearch.gradle.NoticeTask
import org.elasticsearch.gradle.test.RunTask
import org.apache.tools.ant.filters.FixCrLfFilter

import java.nio.file.Path

@@ -281,15 +282,28 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {

  binFiles = { distributionType, oss ->
    copySpec {
      // non-windows files, for all distributions
      with copySpec {
        // main bin files, processed with distribution specific substitutions
        // everything except windows files
        from '../src/bin'
        exclude '*.exe'
        exclude '*.bat'
        eachFile { it.setMode(0755) }
        MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss))
      }
      // windows files, only for zip
      if (distributionType == 'zip') {
        with copySpec {
          from '../src/bin'
          include '*.bat'
          filter(FixCrLfFilter, eol: FixCrLfFilter.CrLf.newInstance('crlf'))
          MavenFilteringHack.filter(it, expansionsForDistribution(distributionType, oss))
        }
        with copySpec {
          from '../src/bin'
          include '*.exe'
        }
      }
      // module provided bin files
      with copySpec {
        eachFile { it.setMode(0755) }
        if (oss) {
@@ -297,6 +311,9 @@ configure(subprojects.findAll { ['archives', 'packages'].contains(it.name) }) {
        } else {
          from project(':distribution').buildDefaultBin
        }
        if (distributionType != 'zip') {
          exclude '*.bat'
        }
      }
    }
  }
@@ -5,6 +5,7 @@
:branch: master
:jdk: 1.8.0_131
:jdk_major: 8
:build_flavor: default

//////////
release-state can be: released | prerelease | unreleased
@@ -20,7 +20,6 @@
apply plugin: 'elasticsearch.docs-test'

integTestCluster {
- distribution = 'oss-zip'
  /* Enable regexes in painless so our tests don't complain about example
   * snippets that use them. */
  setting 'script.painless.regex.enabled', 'true'
@@ -99,11 +99,13 @@ Note that you can also add arrays with `startArray(String)` and
other XContentBuilder objects.

If you need to see the generated JSON content, you can use the
-`string()` method.
+`Strings.toString()` method.

[source,java]
--------------------------------------------------
-String json = builder.string();
+import org.elasticsearch.common.Strings;
+
+String json = Strings.toString(builder);
--------------------------------------------------
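For context, a minimal end-to-end sketch of the replacement API (the field name and value here are made up, and checked exceptions are omitted):

[source,java]
--------------------------------------------------
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

XContentBuilder builder = XContentFactory.jsonBuilder();
builder.startObject()
       .field("user", "kimchy")
       .endObject();
String json = Strings.toString(builder); // {"user":"kimchy"}
--------------------------------------------------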
docs/java-rest/high-level/indices/validate_query.asciidoc (new file, 113 lines)
@@ -0,0 +1,113 @@
[[java-rest-high-indices-validate-query]]
=== Validate Query API

[[java-rest-high-indices-validate-query-request]]
==== Validate Query Request

A `ValidateQueryRequest` requires one or more `indices` on which the query is validated. If no index
is provided, the request is executed on all indices.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request]
--------------------------------------------------
<1> The index on which to run the request.

In addition, the request needs the query to validate. The query can be built using the `QueryBuilders` utility class.
The following code snippet builds a sample boolean query.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-query]
--------------------------------------------------
<1> Build the desired query.
<2> Set it on the request.

==== Optional arguments
The following arguments can optionally be provided:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-explain]
--------------------------------------------------
<1> The explain parameter can be set to true to get more detailed information about why a query failed.

By default, the request is executed on a single shard only, which is randomly selected. The detailed explanation of
the query may depend on which shard is being hit, and therefore may vary from one request to another. So, when
requesting the query rewrite, the `allShards` parameter should be used to get a response from all available shards.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-allShards]
--------------------------------------------------
<1> Set the allShards parameter.

When the query is valid, the explanation defaults to the string representation of that query. With rewrite set to true,
the explanation is more detailed, showing the actual Lucene query that will be executed.

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-request-rewrite]
--------------------------------------------------
<1> Set the rewrite parameter.

[[java-rest-high-indices-validate-query-sync]]
==== Synchronous Execution

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute]
--------------------------------------------------
<1> Execute the request and get back the response in a `ValidateQueryResponse` object.

[[java-rest-high-indices-validate-query-async]]
==== Asynchronous Execution

The asynchronous execution of a validate query request requires both the `ValidateQueryRequest`
instance and an `ActionListener` instance to be passed to the asynchronous
method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute-async]
--------------------------------------------------
<1> The `ValidateQueryRequest` to execute and the `ActionListener` to use when
the execution completes

The asynchronous method does not block and returns immediately. Once it is
completed the `ActionListener` is called back using the `onResponse` method
if the execution successfully completed or using the `onFailure` method if
it failed.

A typical listener for `ValidateQueryResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-execute-listener]
--------------------------------------------------
<1> Called when the execution is successfully completed. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument

[[java-rest-high-indices-validate-query-response]]
==== Validate Query Response

The returned `ValidateQueryResponse` allows the caller to retrieve information about the executed
operation as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[validate-query-response]
--------------------------------------------------
<1> Check if the query is valid or not.
<2> Get the total number of shards.
<3> Get the number of shards that were successful.
<4> Get the number of shards that failed.
<5> Get the shard failures as `DefaultShardOperationFailedException`.
<6> Get the index of a failed shard.
<7> Get the shard id of a failed shard.
<8> Get the reason for shard failure.
<9> Get the detailed explanation for the shards (if explain was set to `true`).
<10> Get the index to which a particular explanation belongs.
<11> Get the shard id to which a particular explanation belongs.
<12> Get the actual explanation string.
docs/java-rest/high-level/script/delete_script.asciidoc (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
[[java-rest-high-delete-stored-script]]
|
||||
|
||||
=== Delete Stored Script API
|
||||
|
||||
[[java-rest-high-delete-stored-script-request]]
|
||||
==== Delete Stored Script Request
|
||||
|
||||
A `DeleteStoredScriptRequest` requires an `id`:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request]
|
||||
--------------------------------------------------
|
||||
<1> The id of the script
|
||||
|
||||
==== Optional arguments
|
||||
The following arguments can optionally be provided:
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request-timeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to wait for the all the nodes to acknowledge the stored script is deleted as a `TimeValue`
|
||||
<2> Timeout to wait for the all the nodes to acknowledge the stored script is deleted as a `String`
|
||||
|
||||
["source","java",subs="attributes,callouts,macros"]
|
||||
--------------------------------------------------
|
||||
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-request-masterTimeout]
|
||||
--------------------------------------------------
|
||||
<1> Timeout to connect to the master node as a `TimeValue`
|
||||
<2> Timeout to connect to the master node as a `String`

[[java-rest-high-delete-stored-script-sync]]
==== Synchronous Execution
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute]
--------------------------------------------------

[[java-rest-high-delete-stored-script-async]]
==== Asynchronous Execution

The asynchronous execution of a delete stored script request requires both the `DeleteStoredScriptRequest`
instance and an `ActionListener` instance to be passed to the asynchronous method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute-async]
--------------------------------------------------
<1> The `DeleteStoredScriptRequest` to execute and the `ActionListener` to use when
the execution completes

[[java-rest-high-delete-stored-script-listener]]
===== Action Listener

The asynchronous method does not block and returns immediately. Once the request
has completed, the `ActionListener` is called back using the `onResponse` method
if the execution completed successfully or using the `onFailure` method if
it failed.

A typical listener for `DeleteStoredScriptResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-execute-listener]
--------------------------------------------------
<1> Called when the execution completes successfully. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument
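
The shape of such a listener, as a minimal standalone sketch:

[source,java]
--------------------------------------------------
// Sketch only: the anonymous-class shape of the listener described above.
ActionListener<DeleteStoredScriptResponse> listener =
        new ActionListener<DeleteStoredScriptResponse>() {
    @Override
    public void onResponse(DeleteStoredScriptResponse response) {
        // called when the request completed successfully
    }

    @Override
    public void onFailure(Exception e) {
        // called when the request failed; e carries the cause
    }
};
--------------------------------------------------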

[[java-rest-high-delete-stored-script-response]]
==== Delete Stored Script Response

The returned `DeleteStoredScriptResponse` allows you to retrieve information about the
executed operation as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[delete-stored-script-response]
--------------------------------------------------
<1> Indicates whether all of the nodes have acknowledged the request
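
That flag is read through the usual acknowledged-response accessor, for example:

[source,java]
--------------------------------------------------
// Sketch only; isAcknowledged() is the standard acknowledged-response accessor.
boolean acknowledged = response.isAcknowledged();
--------------------------------------------------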

docs/java-rest/high-level/script/get_script.asciidoc (new file)
@ -0,0 +1,77 @@
[[java-rest-high-get-stored-script]]

=== Get Stored Script API

[[java-rest-high-get-stored-script-request]]
==== Get Stored Script Request

A `GetStoredScriptRequest` requires an `id`:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-request]
--------------------------------------------------
<1> The id of the script

==== Optional arguments
The following arguments can optionally be provided:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-request-masterTimeout]
--------------------------------------------------
<1> Timeout to connect to the master node as a `TimeValue`
<2> Timeout to connect to the master node as a `String`
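
A minimal request-building sketch mirroring the delete API (the script id is
hypothetical and not part of the tested snippets):

[source,java]
--------------------------------------------------
// Sketch only; the id and values are assumptions.
GetStoredScriptRequest request = new GetStoredScriptRequest("calculate-score");
request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // master node timeout as a TimeValue
request.masterNodeTimeout("1m");                          // the same timeout as a String
--------------------------------------------------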

[[java-rest-high-get-stored-script-sync]]
==== Synchronous Execution
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute]
--------------------------------------------------

[[java-rest-high-get-stored-script-async]]
==== Asynchronous Execution

The asynchronous execution of a get stored script request requires both the `GetStoredScriptRequest`
instance and an `ActionListener` instance to be passed to the asynchronous method:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute-async]
--------------------------------------------------
<1> The `GetStoredScriptRequest` to execute and the `ActionListener` to use when
the execution completes

[[java-rest-high-get-stored-script-listener]]
===== Action Listener

The asynchronous method does not block and returns immediately. Once the request
has completed, the `ActionListener` is called back using the `onResponse` method
if the execution completed successfully or using the `onFailure` method if
it failed.

A typical listener for `GetStoredScriptResponse` looks like:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-execute-listener]
--------------------------------------------------
<1> Called when the execution completes successfully. The response is
provided as an argument
<2> Called in case of failure. The raised exception is provided as an argument

[[java-rest-high-get-stored-script-response]]
==== Get Stored Script Response

The returned `GetStoredScriptResponse` allows you to retrieve information about the
executed operation as follows:

["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/StoredScriptsDocumentationIT.java[get-stored-script-response]
--------------------------------------------------
<1> The script object consists of the script content and its metadata
<2> The language the script is written in, which defaults to `painless`.
<3> The content of the script
<4> Any named options that should be passed into the script.
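
A minimal sketch of reading those fields (the accessor names on
`StoredScriptSource`, which the response wraps, are an assumption here and not
part of the tested snippet):

[source,java]
--------------------------------------------------
// Sketch only: pull the script and its metadata out of the response.
StoredScriptSource source = response.getSource();
String lang = source.getLang();                    // e.g. "painless"
String scriptContent = source.getSource();         // the script itself
Map<String, String> options = source.getOptions(); // named options, if any
--------------------------------------------------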

@ -73,6 +73,7 @@ Index Management::
* <<java-rest-high-rollover-index>>
* <<java-rest-high-indices-put-settings>>
* <<java-rest-high-get-settings>>
* <<java-rest-high-indices-validate-query>>

Mapping Management::
* <<java-rest-high-put-mapping>>
@ -103,6 +104,7 @@ include::indices/get_alias.asciidoc[]
include::indices/put_settings.asciidoc[]
include::indices/get_settings.asciidoc[]
include::indices/put_template.asciidoc[]
include::indices/validate_query.asciidoc[]
include::indices/get_templates.asciidoc[]

== Cluster APIs
@ -149,3 +151,14 @@ The Java High Level REST Client supports the following Tasks APIs:

include::tasks/list_tasks.asciidoc[]
include::tasks/cancel_tasks.asciidoc[]

== Script APIs

The Java High Level REST Client supports the following Script APIs:

* <<java-rest-high-get-stored-script>>
* <<java-rest-high-delete-stored-script>>

include::script/get_script.asciidoc[]
include::script/delete_script.asciidoc[]
@ -372,7 +372,7 @@ This command should give you a JSON result:
"cluster_uuid" : "AT69_T_DTp-1qgIJlatQqA",
"version" : {
"number" : "{version}",
"build_flavor" : "oss",
"build_flavor" : "{build_flavor}",
"build_type" : "zip",
"build_hash" : "f27399d",
"build_date" : "2016-03-30T09:51:41.449Z",
@ -95,4 +95,6 @@ include::tokenfilters/decimal-digit-tokenfilter.asciidoc[]

include::tokenfilters/fingerprint-tokenfilter.asciidoc[]

include::tokenfilters/minhash-tokenfilter.asciidoc[]

include::tokenfilters/remove-duplicates-tokenfilter.asciidoc[]

@ -0,0 +1,5 @@
[[analysis-remove-duplicates-tokenfilter]]
=== Remove Duplicates Token Filter

A token filter of type `remove_duplicates` that drops identical tokens at the
same position.
@ -9,15 +9,23 @@ For example:
GET /_cat/nodeattrs?v
--------------------------------------------------
// CONSOLE
// TEST[s/\?v/\?v&s=node,attr/]
// Sort the resulting attributes so we can assert on them more easily

Could look like:

[source,txt]
--------------------------------------------------
node host ip attr value
EK_AsJb 127.0.0.1 127.0.0.1 testattr test
...
node-0 127.0.0.1 127.0.0.1 testattr test
...
--------------------------------------------------
// TESTRESPONSE[s/EK_AsJb/.+/ _cat]
// TESTRESPONSE[s/\.\.\.\n$/\n(.+ xpack\\.installed true\n)?\n/]
// TESTRESPONSE[s/\.\.\.\n/(.+ ml\\..+\n)*/ _cat]
// If xpack is not installed then neither ... will match anything
// If xpack is installed then the first ... contains ml attributes
// and the second contains xpack.installed=true

The first few columns (`node`, `host`, `ip`) give you basic info per node
and the `attr` and `value` columns give you the custom node attributes,
@ -46,15 +54,24 @@ mode (`v`). The header name will match the supplied value (e.g.,
GET /_cat/nodeattrs?v&h=name,pid,attr,value
--------------------------------------------------
// CONSOLE
// TEST[s/,value/,value&s=node,attr/]
// Sort the resulting attributes so we can assert on them more easily

Might look like:

[source,txt]
--------------------------------------------------
name pid attr value
EK_AsJb 19566 testattr test
...
node-0 19566 testattr test
...
--------------------------------------------------
// TESTRESPONSE[s/EK_AsJb/.+/ s/19566/\\d*/ _cat]
// TESTRESPONSE[s/19566/\\d*/]
// TESTRESPONSE[s/\.\.\.\n$/\n(.+ xpack\\.installed true\n)?\n/]
// TESTRESPONSE[s/\.\.\.\n/(.+ ml\\..+\n)*/ _cat]
// If xpack is not installed then neither ... will match anything
// If xpack is installed then the first ... contains ml attributes
// and the second contains xpack.installed=true

[cols="<,<,<,<,<",options="header",subs="normal"]
|=======================================================================
@ -8,9 +8,16 @@ The `templates` command provides information about existing templates.
GET /_cat/templates?v&s=name
--------------------------------------------------
// CONSOLE
// TEST[s/templates/templates\/template*/]
// TEST[s/^/PUT _template\/template0\n{"index_patterns": "te*", "order": 0}\n/]
// TEST[s/^/PUT _template\/template1\n{"index_patterns": "tea*", "order": 1}\n/]
// TEST[s/^/PUT _template\/template2\n{"index_patterns": "teak*", "order": 2, "version": 7}\n/]
// The substitutions do two things:
// 1. Filter the response to just templates matching the te* pattern
//    so that we only get the templates we expect regardless of which
//    templates exist. If xpack is installed there will be unexpected
//    templates.
// 2. Create some templates to expect in the response.

which looks like
@ -18,18 +18,19 @@ node-0 analyze 0 0 0
node-0 fetch_shard_started 0 0 0
node-0 fetch_shard_store 0 0 0
node-0 flush 0 0 0
node-0 force_merge 0 0 0
node-0 generic 0 0 0
node-0 get 0 0 0
node-0 listener 0 0 0
node-0 management 1 0 0
node-0 refresh 0 0 0
node-0 search 0 0 0
node-0 snapshot 0 0 0
node-0 warmer 0 0 0
...
node-0 write 0 0 0
--------------------------------------------------
// TESTRESPONSE[s/\.\.\./(node-0 .+ 0 0 0\n)+/]
// TESTRESPONSE[s/\d+/\\d+/ _cat]
// The substitutions do two things:
// 1. Expect any number of extra thread pools. This allows us to only list a
//    few thread pools. The list would be super long otherwise. In addition,
//    if xpack is installed then the list will contain more thread pools and
//    this way we don't have to assert about them.
// 2. Expect any number of active, queued, or rejected items. We really don't
//    know how many there will be and we just want to assert that there are
//    numbers in the response, not *which* numbers are there.

The first column is the node name

@ -52,10 +53,16 @@ generic
get
listener
management
ml_autodetect (default distro only)
ml_datafeed (default distro only)
ml_utility (default distro only)
refresh
rollup_indexing (default distro only)
search
security-token-key (default distro only)
snapshot
warmer
watcher (default distro only)
write
--------------------------------------------------
@ -142,7 +142,7 @@ The result will look similar to:
"host": "node-0.elastic.co",
"ip": "192.168.17",
"version": "{version}",
"build_flavor": "oss",
"build_flavor": "{build_flavor}",
"build_type": "zip",
"build_hash": "587409e",
"roles": [
@ -237,7 +237,7 @@ The result will look similar to:
"host": "node-0.elastic.co",
"ip": "192.168.17",
"version": "{version}",
"build_flavor": "oss",
"build_flavor": "{build_flavor}",
"build_type": "zip",
"build_hash": "587409e",
"roles": [],
@ -192,23 +192,23 @@ Will return, for example:
"description": "Ingest processor that extracts information from a user agent",
"classname": "org.elasticsearch.ingest.useragent.IngestUserAgentPlugin",
"has_native_controller": false
}
},
...
],
"network_types" : {
"transport_types" : {
"netty4" : 1
},
"http_types" : {
"netty4" : 1
}
}
...
}
}
--------------------------------------------------
// TESTRESPONSE[s/"plugins": \[[^\]]*\]/"plugins": $body.$_path/]
// TESTRESPONSE[s/\.\.\./"network_types": "replace_me"/]
// TESTRESPONSE[s/: (\-)?[0-9]+/: $body.$_path/]
// TESTRESPONSE[s/: "[^"]*"/: $body.$_path/]
////
The TESTRESPONSE above replace all the fields values by the expected ones in the test,
because we don't really care about the field values but we want to check the fields names.
////
// These replacements do a few things:
// 1. Ignore the contents of the `plugins` object because we don't know all of
//    the plugins that will be in it. And because we figure folks don't need to
//    see an exhaustive list anyway.
// 2. The last ... contains more things that we don't think are important to
//    include in the output.
// 3. All of the numbers and strings on the right hand side of *every* field in
//    the response are ignored. So we're really only asserting things about
//    the shape of this response, not the values in it.
@ -422,11 +422,11 @@ POST _reindex
// TEST[s/"username": "user",//]
// TEST[s/"password": "pass"//]

The `host` parameter must contain a scheme, host, and port (e.g.
`https://otherhost:9200`). The `username` and `password` parameters are
optional, and when they are present `_reindex` will connect to the remote
Elasticsearch node using basic auth. Be sure to use `https` when using
basic auth or the password will be sent in plain text.
The `host` parameter must contain a scheme, host, port (e.g.
`https://otherhost:9200`) and optional path (e.g. `https://otherhost:9200/proxy`).
The `username` and `password` parameters are optional, and when they are present `_reindex`
will connect to the remote Elasticsearch node using basic auth. Be sure to use `https` when
using basic auth or the password will be sent in plain text.

Remote hosts have to be explicitly whitelisted in elasticsearch.yml using the
`reindex.remote.whitelist` property. It can be set to a comma delimited list
@ -244,6 +244,94 @@ GET /alias2/_search?q=user:kimchy&routing=2,3
// CONSOLE
// TEST[continued]

[float]
[[aliases-write-index]]
==== Write Index

It is possible to associate the index pointed to by an alias as the write index.
When specified, all index and update requests against an alias that points to multiple
indices will attempt to resolve to the one index that is the write index.
Only one index per alias can be assigned to be the write index at a time. If no write index is specified
and there are multiple indices referenced by an alias, then writes will not be allowed.

It is possible to specify an index associated with an alias as a write index using both the aliases API
and index creation API.

[source,js]
--------------------------------------------------
POST /_aliases
{
    "actions" : [
        {
            "add" : {
                "index" : "test",
                "alias" : "alias1",
                "is_write_index" : true
            }
        }
    ]
}
--------------------------------------------------
// CONSOLE
// TEST[s/^/PUT test\n/]

In this example, we associate the alias `alias1` to both `test` and `test2`, where
`test` will be the index chosen for writing to.

[source,js]
--------------------------------------------------
PUT /alias1/_doc/1
{
    "foo": "bar"
}
--------------------------------------------------
// CONSOLE
// TEST[continued]

The new document that was indexed to `/alias1/_doc/1` will be indexed as if it were
`/test/_doc/1`.

[source,js]
--------------------------------------------------
GET /test/_doc/1
--------------------------------------------------
// CONSOLE
// TEST[continued]

To swap which index is the write index for an alias, the Aliases API can be leveraged to
do an atomic swap. The swap is not dependent on the ordering of the actions.

[source,js]
--------------------------------------------------
POST /_aliases
{
    "actions" : [
        {
            "add" : {
                "index" : "test",
                "alias" : "alias1",
                "is_write_index" : true
            }
        }, {
            "add" : {
                "index" : "test2",
                "alias" : "alias1",
                "is_write_index" : false
            }
        }
    ]
}
--------------------------------------------------
// CONSOLE
// TEST[s/^/PUT test\nPUT test2\n/]

[IMPORTANT]
=====================================
Aliases that do not explicitly set `is_write_index: true` for an index, and
only reference one index, will have that referenced index behave as if it is the write index
until an additional index is referenced. At that point, there will be no write index and
writes will be rejected.
=====================================

[float]
[[alias-adding]]
@ -168,6 +168,30 @@ This query produces a similar result:
--------------------------------------------------
// TESTRESPONSE[s/"took": 13/"took" : $body.took/]

[[ip-range]]
==== IP Range

In addition to the range format above, IP ranges can be provided in
https://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing#CIDR_notation[CIDR] notation:

[source,js]
--------------------------------------------------
PUT range_index/_mapping/_doc
{
    "properties": {
        "ip_whitelist": {
            "type": "ip_range"
        }
    }
}

PUT range_index/_doc/2
{
    "ip_whitelist" : "192.168.0.0/16"
}
--------------------------------------------------
// CONSOLE
// TEST[setup:range_index]

[[range-params]]
==== Parameters for range fields
@ -17,6 +17,9 @@ As a general rule:
* Migration between non-consecutive major versions -- e.g. `5.x` to `7.x` --
is not supported.

See <<setup-upgrade>> for more info.
For more information, see <<setup-upgrade>>.

See also <<release-highlights>> and <<es-release-notes>>.

--
include::migrate_7_0.asciidoc[]

@ -4,6 +4,8 @@
This section discusses the changes that you need to be aware of when migrating
your application to Elasticsearch 7.0.

See also <<release-highlights>> and <<es-release-notes>>.

[float]
=== Indices created before 7.0
@ -8,3 +8,8 @@ The systemd service file `/usr/lib/systemd/system/elasticsearch.service`
was previously marked as a configuration file in rpm and deb packages.
Overrides to the systemd elasticsearch service should be made
in `/etc/systemd/system/elasticsearch.service.d/override.conf`.

==== tar package no longer includes windows specific files

The tar package previously included files in the `bin` directory meant only
for Windows. These files have been removed. Use the `zip` package instead.
@ -16,3 +16,9 @@ Cross-Cluster-Search::

Rest API::
* The Clear Cache API only supports `POST` as HTTP method

Aggregations::
* The Percentiles and PercentileRanks aggregations now return `null` in the REST response,
instead of `NaN`. This makes it consistent with the rest of the aggregations. Note:
this only applies to the REST response; the Java objects continue to return `NaN` (also
consistent with other aggregations)
@ -19,7 +19,7 @@ which should give you a response something like this:
"cluster_uuid" : "AT69_T_DTp-1qgIJlatQqA",
"version" : {
"number" : "{version}",
"build_flavor" : "oss",
"build_flavor" : "{build_flavor}",
"build_type" : "zip",
"build_hash" : "f27399d",
"build_date" : "2016-03-30T09:51:41.449Z",
@ -19,8 +19,6 @@

package org.elasticsearch.common.util.concurrent;

import org.apache.lucene.store.AlreadyClosedException;

import java.util.concurrent.atomic.AtomicInteger;

/**
@ -68,7 +66,7 @@ public abstract class AbstractRefCounted implements RefCounted {
    }

    protected void alreadyClosed() {
        throw new AlreadyClosedException(name + " is already closed can't increment refCount current count [" + refCount.get() + "]");
        throw new IllegalStateException(name + " is already closed can't increment refCount current count [" + refCount.get() + "]");
    }

    /**
@ -44,7 +44,7 @@ public interface RefCounted {
     *
     * @see #decRef
     * @see #tryIncRef()
     * @throws org.apache.lucene.store.AlreadyClosedException iff the reference counter can not be incremented.
     * @throws IllegalStateException iff the reference counter can not be incremented.
     */
    void incRef();
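// For context, the usual acquire/release idiom around this contract
// (a minimal sketch, not part of the diff; `counted` is any RefCounted):
counted.incRef();     // now throws IllegalStateException once fully released
try {
    // ... use the guarded resource ...
} finally {
    counted.decRef(); // releases the reference; the resource closes at zero
}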
@ -18,7 +18,6 @@
 */
package org.elasticsearch.common.util.concurrent;

import org.apache.lucene.store.AlreadyClosedException;
import org.elasticsearch.test.ESTestCase;
import org.hamcrest.Matchers;

@ -70,14 +69,14 @@ public class RefCountedTests extends ESTestCase {
        try {
            counted.incRef();
            fail(" expected exception");
        } catch (AlreadyClosedException ex) {
        } catch (IllegalStateException ex) {
            assertThat(ex.getMessage(), equalTo("test is already closed can't increment refCount current count [0]"));
        }

        try {
            counted.ensureOpen();
            fail(" expected exception");
        } catch (AlreadyClosedException ex) {
        } catch (IllegalStateException ex) {
            assertThat(ex.getMessage(), equalTo("closed"));
        }
    }
@ -116,7 +115,7 @@ public class RefCountedTests extends ESTestCase {
        try {
            counted.ensureOpen();
            fail("expected to be closed");
        } catch (AlreadyClosedException ex) {
        } catch (IllegalStateException ex) {
            assertThat(ex.getMessage(), equalTo("closed"));
        }
        assertThat(counted.refCount(), is(0));
@ -140,7 +139,7 @@ public class RefCountedTests extends ESTestCase {
        public void ensureOpen() {
            if (closed.get()) {
                assert this.refCount() == 0;
                throw new AlreadyClosedException("closed");
                throw new IllegalStateException("closed");
            }
        }
    }
@ -19,6 +19,7 @@

package org.elasticsearch.nio;

import org.elasticsearch.common.util.concurrent.AbstractRefCounted;
import org.elasticsearch.nio.utils.ExceptionsHelper;

import java.nio.ByteBuffer;
@ -41,6 +42,7 @@ public final class InboundChannelBuffer implements AutoCloseable {
    private static final int PAGE_MASK = PAGE_SIZE - 1;
    private static final int PAGE_SHIFT = Integer.numberOfTrailingZeros(PAGE_SIZE);
    private static final ByteBuffer[] EMPTY_BYTE_BUFFER_ARRAY = new ByteBuffer[0];
    private static final Page[] EMPTY_BYTE_PAGE_ARRAY = new Page[0];


    private final ArrayDeque<Page> pages;
@ -152,6 +154,46 @@ public final class InboundChannelBuffer implements AutoCloseable {
        return buffers;
    }

    /**
     * This method will return an array of {@link Page} representing the bytes from the beginning of
     * this buffer up through the index argument that was passed. The pages and buffers will be duplicates of
     * the internal components, so any modifications to the markers {@link ByteBuffer#position()},
     * {@link ByteBuffer#limit()}, etc will not modify this class. Additionally, this will internally
     * retain the underlying pages, so the pages returned by this method must be closed.
     *
     * @param to the index to slice up to
     * @return the pages
     */
    public Page[] sliceAndRetainPagesTo(long to) {
        if (to > capacity) {
            throw new IndexOutOfBoundsException("can't slice a channel buffer with capacity [" + capacity +
                "], with slice parameters to [" + to + "]");
        } else if (to == 0) {
            return EMPTY_BYTE_PAGE_ARRAY;
        }
        long indexWithOffset = to + offset;
        int pageCount = pageIndex(indexWithOffset);
        int finalLimit = indexInPage(indexWithOffset);
        if (finalLimit != 0) {
            pageCount += 1;
        }

        Page[] pages = new Page[pageCount];
        Iterator<Page> pageIterator = this.pages.iterator();
        Page firstPage = pageIterator.next().duplicate();
        ByteBuffer firstBuffer = firstPage.byteBuffer;
        firstBuffer.position(firstBuffer.position() + offset);
        pages[0] = firstPage;
        for (int i = 1; i < pages.length; i++) {
            pages[i] = pageIterator.next().duplicate();
        }
        if (finalLimit != 0) {
            pages[pages.length - 1].byteBuffer.limit(finalLimit);
        }

        return pages;
    }

    /**
     * This method will return an array of {@link ByteBuffer} representing the bytes from the index passed
     * through the end of this buffer. The buffers will be duplicates of the internal buffers, so any
@ -231,16 +273,49 @@ public final class InboundChannelBuffer implements AutoCloseable {
    public static class Page implements AutoCloseable {

        private final ByteBuffer byteBuffer;
        private final Runnable closeable;
        // This is reference counted as some implementations want to retain the byte pages by calling
        // sliceAndRetainPagesTo. With reference counting we can increment the reference count, return the
        // pages, and safely close them when this channel buffer is done with them. The reference count
        // would be 1 at that point, meaning that the pages will remain until the implementation closes
        // theirs.
        private final RefCountedCloseable refCountedCloseable;

        public Page(ByteBuffer byteBuffer, Runnable closeable) {
            this(byteBuffer, new RefCountedCloseable(closeable));
        }

        private Page(ByteBuffer byteBuffer, RefCountedCloseable refCountedCloseable) {
            this.byteBuffer = byteBuffer;
            this.closeable = closeable;
            this.refCountedCloseable = refCountedCloseable;
        }

        private Page duplicate() {
            refCountedCloseable.incRef();
            return new Page(byteBuffer.duplicate(), refCountedCloseable);
        }

        public ByteBuffer getByteBuffer() {
            return byteBuffer;
        }

        @Override
        public void close() {
            closeable.run();
            refCountedCloseable.decRef();
        }

        private static class RefCountedCloseable extends AbstractRefCounted {

            private final Runnable closeable;

            private RefCountedCloseable(Runnable closeable) {
                super("byte array page");
                this.closeable = closeable;
            }

            @Override
            protected void closeInternal() {
                closeable.run();
            }
        }
    }
}
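// For orientation, the retain/close contract this change introduces, shown as a
// hypothetical caller sketch (not part of the diff; `buffer` and `bytesConsumed`
// are assumed names):
InboundChannelBuffer.Page[] pages = buffer.sliceAndRetainPagesTo(bytesConsumed);
try {
    // hand the pages off for reading, e.g. via pages[0].getByteBuffer()
} finally {
    for (InboundChannelBuffer.Page page : pages) {
        page.close(); // each duplicate releases its reference; the underlying
                      // memory is freed once the buffer releases its own
    }
}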
@ -31,7 +31,8 @@ public class InboundChannelBufferTests extends ESTestCase {

    private static final int PAGE_SIZE = BigArrays.PAGE_SIZE_IN_BYTES;
    private final Supplier<InboundChannelBuffer.Page> defaultPageSupplier = () ->
        new InboundChannelBuffer.Page(ByteBuffer.allocate(BigArrays.BYTE_PAGE_SIZE), () -> {});
        new InboundChannelBuffer.Page(ByteBuffer.allocate(BigArrays.BYTE_PAGE_SIZE), () -> {
        });

    public void testNewBufferHasSinglePage() {
        InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier);
@ -167,6 +168,49 @@ public class InboundChannelBufferTests extends ESTestCase {
        expectThrows(IllegalStateException.class, () -> channelBuffer.ensureCapacity(1));
    }

    public void testCloseRetainedPages() {
        ConcurrentLinkedQueue<AtomicBoolean> queue = new ConcurrentLinkedQueue<>();
        Supplier<InboundChannelBuffer.Page> supplier = () -> {
            AtomicBoolean atomicBoolean = new AtomicBoolean();
            queue.add(atomicBoolean);
            return new InboundChannelBuffer.Page(ByteBuffer.allocate(PAGE_SIZE), () -> atomicBoolean.set(true));
        };
        InboundChannelBuffer channelBuffer = new InboundChannelBuffer(supplier);
        channelBuffer.ensureCapacity(PAGE_SIZE * 4);

        assertEquals(4, queue.size());

        for (AtomicBoolean closedRef : queue) {
            assertFalse(closedRef.get());
        }

        InboundChannelBuffer.Page[] pages = channelBuffer.sliceAndRetainPagesTo(PAGE_SIZE * 2);

        pages[1].close();

        for (AtomicBoolean closedRef : queue) {
            assertFalse(closedRef.get());
        }

        channelBuffer.close();

        int i = 0;
        for (AtomicBoolean closedRef : queue) {
            if (i < 1) {
                assertFalse(closedRef.get());
            } else {
                assertTrue(closedRef.get());
            }
            ++i;
        }

        pages[0].close();

        for (AtomicBoolean closedRef : queue) {
            assertTrue(closedRef.get());
        }
    }

    public void testAccessByteBuffers() {
        InboundChannelBuffer channelBuffer = new InboundChannelBuffer(defaultPageSupplier);
@ -37,6 +37,7 @@ import org.apache.lucene.analysis.charfilter.HTMLStripCharFilter;
import org.apache.lucene.analysis.cjk.CJKAnalyzer;
import org.apache.lucene.analysis.cjk.CJKBigramFilter;
import org.apache.lucene.analysis.cjk.CJKWidthFilter;
import org.apache.lucene.analysis.ckb.SoraniAnalyzer;
import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter;
import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
import org.apache.lucene.analysis.core.DecimalDigitFilter;
@ -52,17 +53,27 @@ import org.apache.lucene.analysis.da.DanishAnalyzer;
import org.apache.lucene.analysis.de.GermanAnalyzer;
import org.apache.lucene.analysis.de.GermanNormalizationFilter;
import org.apache.lucene.analysis.de.GermanStemFilter;
import org.apache.lucene.analysis.el.GreekAnalyzer;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.en.KStemFilter;
import org.apache.lucene.analysis.en.PorterStemFilter;
import org.apache.lucene.analysis.es.SpanishAnalyzer;
import org.apache.lucene.analysis.eu.BasqueAnalyzer;
import org.apache.lucene.analysis.fa.PersianAnalyzer;
import org.apache.lucene.analysis.fa.PersianNormalizationFilter;
import org.apache.lucene.analysis.fi.FinnishAnalyzer;
import org.apache.lucene.analysis.fr.FrenchAnalyzer;
import org.apache.lucene.analysis.ga.IrishAnalyzer;
import org.apache.lucene.analysis.gl.GalicianAnalyzer;
import org.apache.lucene.analysis.hi.HindiAnalyzer;
import org.apache.lucene.analysis.hi.HindiNormalizationFilter;
import org.apache.lucene.analysis.hu.HungarianAnalyzer;
import org.apache.lucene.analysis.hy.ArmenianAnalyzer;
import org.apache.lucene.analysis.id.IndonesianAnalyzer;
import org.apache.lucene.analysis.in.IndicNormalizationFilter;
import org.apache.lucene.analysis.it.ItalianAnalyzer;
import org.apache.lucene.analysis.lt.LithuanianAnalyzer;
import org.apache.lucene.analysis.lv.LatvianAnalyzer;
import org.apache.lucene.analysis.miscellaneous.ASCIIFoldingFilter;
import org.apache.lucene.analysis.miscellaneous.DisableGraphAttribute;
import org.apache.lucene.analysis.miscellaneous.KeywordRepeatFilter;
@ -79,19 +90,26 @@ import org.apache.lucene.analysis.ngram.EdgeNGramTokenizer;
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
import org.apache.lucene.analysis.ngram.NGramTokenizer;
import org.apache.lucene.analysis.nl.DutchAnalyzer;
import org.apache.lucene.analysis.no.NorwegianAnalyzer;
import org.apache.lucene.analysis.path.PathHierarchyTokenizer;
import org.apache.lucene.analysis.pattern.PatternTokenizer;
import org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilter;
import org.apache.lucene.analysis.payloads.TypeAsPayloadTokenFilter;
import org.apache.lucene.analysis.pt.PortugueseAnalyzer;
import org.apache.lucene.analysis.reverse.ReverseStringFilter;
import org.apache.lucene.analysis.ro.RomanianAnalyzer;
import org.apache.lucene.analysis.ru.RussianAnalyzer;
import org.apache.lucene.analysis.shingle.ShingleFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.standard.ClassicFilter;
import org.apache.lucene.analysis.standard.ClassicTokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer;
import org.apache.lucene.analysis.sv.SwedishAnalyzer;
import org.apache.lucene.analysis.th.ThaiAnalyzer;
import org.apache.lucene.analysis.th.ThaiTokenizer;
import org.apache.lucene.analysis.tr.ApostropheFilter;
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.apache.lucene.analysis.util.ElisionFilter;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers;
@ -130,6 +148,8 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
        analyzers.put("standard_html_strip", StandardHtmlStripAnalyzerProvider::new);
        analyzers.put("pattern", PatternAnalyzerProvider::new);
        analyzers.put("snowball", SnowballAnalyzerProvider::new);

        // Language analyzers:
        analyzers.put("arabic", ArabicAnalyzerProvider::new);
        analyzers.put("armenian", ArmenianAnalyzerProvider::new);
        analyzers.put("basque", BasqueAnalyzerProvider::new);
@ -147,6 +167,24 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
        analyzers.put("french", FrenchAnalyzerProvider::new);
        analyzers.put("galician", GalicianAnalyzerProvider::new);
        analyzers.put("german", GermanAnalyzerProvider::new);
        analyzers.put("greek", GreekAnalyzerProvider::new);
        analyzers.put("hindi", HindiAnalyzerProvider::new);
        analyzers.put("hungarian", HungarianAnalyzerProvider::new);
        analyzers.put("indonesian", IndonesianAnalyzerProvider::new);
        analyzers.put("irish", IrishAnalyzerProvider::new);
        analyzers.put("italian", ItalianAnalyzerProvider::new);
        analyzers.put("latvian", LatvianAnalyzerProvider::new);
        analyzers.put("lithuanian", LithuanianAnalyzerProvider::new);
        analyzers.put("norwegian", NorwegianAnalyzerProvider::new);
        analyzers.put("persian", PersianAnalyzerProvider::new);
        analyzers.put("portuguese", PortugueseAnalyzerProvider::new);
        analyzers.put("romanian", RomanianAnalyzerProvider::new);
        analyzers.put("russian", RussianAnalyzerProvider::new);
        analyzers.put("sorani", SoraniAnalyzerProvider::new);
        analyzers.put("spanish", SpanishAnalyzerProvider::new);
        analyzers.put("swedish", SwedishAnalyzerProvider::new);
        analyzers.put("turkish", TurkishAnalyzerProvider::new);
        analyzers.put("thai", ThaiAnalyzerProvider::new);
        return analyzers;
    }

@ -194,6 +232,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
        filters.put("pattern_replace", requriesAnalysisSettings(PatternReplaceTokenFilterFactory::new));
        filters.put("persian_normalization", PersianNormalizationFilterFactory::new);
        filters.put("porter_stem", PorterStemTokenFilterFactory::new);
        filters.put("remove_duplicates", RemoveDuplicatesTokenFilterFactory::new);
        filters.put("reverse", ReverseTokenFilterFactory::new);
        filters.put("russian_stem", RussianStemTokenFilterFactory::new);
        filters.put("scandinavian_folding", ScandinavianFoldingFilterFactory::new);
@ -247,13 +286,15 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
    @Override
    public List<PreBuiltAnalyzerProviderFactory> getPreBuiltAnalyzerProviderFactories() {
        List<PreBuiltAnalyzerProviderFactory> analyzers = new ArrayList<>();
        analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.LUCENE,
        analyzers.add(new PreBuiltAnalyzerProviderFactory("standard_html_strip", CachingStrategy.ELASTICSEARCH,
            () -> new StandardHtmlStripAnalyzer(CharArraySet.EMPTY_SET)));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("pattern", CachingStrategy.ELASTICSEARCH,
            () -> new PatternAnalyzer(Regex.compile("\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/, null), true,
                CharArraySet.EMPTY_SET)));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("snowball", CachingStrategy.LUCENE,
            () -> new SnowballAnalyzer("English", StopAnalyzer.ENGLISH_STOP_WORDS_SET)));

        // Language analyzers:
        analyzers.add(new PreBuiltAnalyzerProviderFactory("arabic", CachingStrategy.LUCENE, ArabicAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("armenian", CachingStrategy.LUCENE, ArmenianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("basque", CachingStrategy.LUCENE, BasqueAnalyzer::new));
@ -262,7 +303,7 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
        analyzers.add(new PreBuiltAnalyzerProviderFactory("bulgarian", CachingStrategy.LUCENE, BulgarianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("catalan", CachingStrategy.LUCENE, CatalanAnalyzer::new));
        // chinese analyzer: only for old indices, best effort
        analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.LUCENE, StandardAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("chinese", CachingStrategy.ONE, StandardAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("cjk", CachingStrategy.LUCENE, CJKAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("czech", CachingStrategy.LUCENE, CzechAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("danish", CachingStrategy.LUCENE, DanishAnalyzer::new));
@ -272,6 +313,24 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin {
        analyzers.add(new PreBuiltAnalyzerProviderFactory("french", CachingStrategy.LUCENE, FrenchAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("galician", CachingStrategy.LUCENE, GalicianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("german", CachingStrategy.LUCENE, GermanAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("greek", CachingStrategy.LUCENE, GreekAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("hindi", CachingStrategy.LUCENE, HindiAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("hungarian", CachingStrategy.LUCENE, HungarianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("indonesian", CachingStrategy.LUCENE, IndonesianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("irish", CachingStrategy.LUCENE, IrishAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("italian", CachingStrategy.LUCENE, ItalianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("latvian", CachingStrategy.LUCENE, LatvianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("lithuanian", CachingStrategy.LUCENE, LithuanianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("norwegian", CachingStrategy.LUCENE, NorwegianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("persian", CachingStrategy.LUCENE, PersianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("portuguese", CachingStrategy.LUCENE, PortugueseAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("romanian", CachingStrategy.LUCENE, RomanianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("russian", CachingStrategy.LUCENE, RussianAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("sorani", CachingStrategy.LUCENE, SoraniAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("spanish", CachingStrategy.LUCENE, SpanishAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("swedish", CachingStrategy.LUCENE, SwedishAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("turkish", CachingStrategy.LUCENE, TurkishAnalyzer::new));
        analyzers.add(new PreBuiltAnalyzerProviderFactory("thai", CachingStrategy.LUCENE, ThaiAnalyzer::new));
        return analyzers;
    }
@ -17,18 +17,20 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.el.GreekAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class GreekAnalyzerProvider extends AbstractIndexAnalyzerProvider<GreekAnalyzer> {

    private final GreekAnalyzer analyzer;

    public GreekAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    GreekAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new GreekAnalyzer(
            Analysis.parseStopWords(env, settings, GreekAnalyzer.getDefaultStopSet()));
@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.hi.HindiAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class HindiAnalyzerProvider extends AbstractIndexAnalyzerProvider<HindiAnalyzer> {

    private final HindiAnalyzer analyzer;

    public HindiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    HindiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new HindiAnalyzer(
            Analysis.parseStopWords(env, settings, HindiAnalyzer.getDefaultStopSet()),
@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.hu.HungarianAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class HungarianAnalyzerProvider extends AbstractIndexAnalyzerProvider<HungarianAnalyzer> {

    private final HungarianAnalyzer analyzer;

    public HungarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    HungarianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new HungarianAnalyzer(
            Analysis.parseStopWords(env, settings, HungarianAnalyzer.getDefaultStopSet()),
@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.id.IndonesianAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class IndonesianAnalyzerProvider extends AbstractIndexAnalyzerProvider<IndonesianAnalyzer> {

    private final IndonesianAnalyzer analyzer;

    public IndonesianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    IndonesianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new IndonesianAnalyzer(
            Analysis.parseStopWords(env, settings, IndonesianAnalyzer.getDefaultStopSet()),
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.ga.IrishAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
/**
|
||||
* Provider for {@link IrishAnalyzer}
|
||||
@ -32,7 +34,7 @@ public class IrishAnalyzerProvider extends AbstractIndexAnalyzerProvider<IrishAn
|
||||
|
||||
private final IrishAnalyzer analyzer;
|
||||
|
||||
public IrishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
IrishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new IrishAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, IrishAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.it.ItalianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class ItalianAnalyzerProvider extends AbstractIndexAnalyzerProvider<ItalianAnalyzer> {
|
||||
|
||||
private final ItalianAnalyzer analyzer;
|
||||
|
||||
public ItalianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
ItalianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new ItalianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, ItalianAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.lv.LatvianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class LatvianAnalyzerProvider extends AbstractIndexAnalyzerProvider<LatvianAnalyzer> {
|
||||
|
||||
private final LatvianAnalyzer analyzer;
|
||||
|
||||
public LatvianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
LatvianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new LatvianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, LatvianAnalyzer.getDefaultStopSet()),
|
@ -17,13 +17,15 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.lt.LithuanianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
/**
|
||||
* Provider for {@link LithuanianAnalyzer}
|
||||
@ -32,7 +34,7 @@ public class LithuanianAnalyzerProvider extends AbstractIndexAnalyzerProvider<Li
|
||||
|
||||
private final LithuanianAnalyzer analyzer;
|
||||
|
||||
public LithuanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
LithuanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new LithuanianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, LithuanianAnalyzer.getDefaultStopSet()),
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.no.NorwegianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class NorwegianAnalyzerProvider extends AbstractIndexAnalyzerProvider<NorwegianAnalyzer> {
|
||||
|
||||
private final NorwegianAnalyzer analyzer;
|
||||
|
||||
public NorwegianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
NorwegianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new NorwegianAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, NorwegianAnalyzer.getDefaultStopSet()),
|
@ -17,18 +17,20 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.fa.PersianAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class PersianAnalyzerProvider extends AbstractIndexAnalyzerProvider<PersianAnalyzer> {
|
||||
|
||||
private final PersianAnalyzer analyzer;
|
||||
|
||||
public PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
PersianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new PersianAnalyzer(Analysis.parseStopWords(env, settings, PersianAnalyzer.getDefaultStopSet()));
|
||||
analyzer.setVersion(version);
|
@ -17,19 +17,21 @@
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.analysis;
|
||||
package org.elasticsearch.analysis.common;
|
||||
|
||||
import org.apache.lucene.analysis.CharArraySet;
|
||||
import org.apache.lucene.analysis.pt.PortugueseAnalyzer;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.env.Environment;
|
||||
import org.elasticsearch.index.IndexSettings;
|
||||
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
|
||||
import org.elasticsearch.index.analysis.Analysis;
|
||||
|
||||
public class PortugueseAnalyzerProvider extends AbstractIndexAnalyzerProvider<PortugueseAnalyzer> {
|
||||
|
||||
private final PortugueseAnalyzer analyzer;
|
||||
|
||||
public PortugueseAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
PortugueseAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
|
||||
super(indexSettings, name, settings);
|
||||
analyzer = new PortugueseAnalyzer(
|
||||
Analysis.parseStopWords(env, settings, PortugueseAnalyzer.getDefaultStopSet()),
|
@ -0,0 +1,42 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.RemoveDuplicatesTokenFilter;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;

/**
 * Filter factory for the lucene RemoveDuplicatesTokenFilter
 */
class RemoveDuplicatesTokenFilterFactory extends AbstractTokenFilterFactory {

    RemoveDuplicatesTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
    }

    @Override
    public TokenStream create(TokenStream tokenStream) {
        return new RemoveDuplicatesTokenFilter(tokenStream);
    }
}
@@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ro.RomanianAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class RomanianAnalyzerProvider extends AbstractIndexAnalyzerProvider<RomanianAnalyzer> {

    private final RomanianAnalyzer analyzer;

    public RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    RomanianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new RomanianAnalyzer(
                Analysis.parseStopWords(env, settings, RomanianAnalyzer.getDefaultStopSet()),

@@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ru.RussianAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class RussianAnalyzerProvider extends AbstractIndexAnalyzerProvider<RussianAnalyzer> {

    private final RussianAnalyzer analyzer;

    public RussianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    RussianAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new RussianAnalyzer(
                Analysis.parseStopWords(env, settings, RussianAnalyzer.getDefaultStopSet()),

@@ -17,13 +17,15 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.ckb.SoraniAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

/**
 * Provider for {@link SoraniAnalyzer}
@@ -32,7 +34,7 @@ public class SoraniAnalyzerProvider extends AbstractIndexAnalyzerProvider<Sorani

    private final SoraniAnalyzer analyzer;

    public SoraniAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    SoraniAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new SoraniAnalyzer(
                Analysis.parseStopWords(env, settings, SoraniAnalyzer.getDefaultStopSet()),

@@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.es.SpanishAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class SpanishAnalyzerProvider extends AbstractIndexAnalyzerProvider<SpanishAnalyzer> {

    private final SpanishAnalyzer analyzer;

    public SpanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    SpanishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new SpanishAnalyzer(
                Analysis.parseStopWords(env, settings, SpanishAnalyzer.getDefaultStopSet()),

@@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.sv.SwedishAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class SwedishAnalyzerProvider extends AbstractIndexAnalyzerProvider<SwedishAnalyzer> {

    private final SwedishAnalyzer analyzer;

    public SwedishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    SwedishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new SwedishAnalyzer(
                Analysis.parseStopWords(env, settings, SwedishAnalyzer.getDefaultStopSet()),

@@ -17,18 +17,20 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.th.ThaiAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class ThaiAnalyzerProvider extends AbstractIndexAnalyzerProvider<ThaiAnalyzer> {

    private final ThaiAnalyzer analyzer;

    public ThaiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    ThaiAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new ThaiAnalyzer(
                Analysis.parseStopWords(env, settings, ThaiAnalyzer.getDefaultStopSet()));

@@ -17,19 +17,21 @@
 * under the License.
 */

package org.elasticsearch.index.analysis;
package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.tr.TurkishAnalyzer;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractIndexAnalyzerProvider;
import org.elasticsearch.index.analysis.Analysis;

public class TurkishAnalyzerProvider extends AbstractIndexAnalyzerProvider<TurkishAnalyzer> {

    private final TurkishAnalyzer analyzer;

    public TurkishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
    TurkishAnalyzerProvider(IndexSettings indexSettings, Environment env, String name, Settings settings) {
        super(indexSettings, name, settings);
        analyzer = new TurkishAnalyzer(
                Analysis.parseStopWords(env, settings, TurkishAnalyzer.getDefaultStopSet()),
@@ -0,0 +1,61 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.analysis.common;

import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.Token;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.analysis.AnalysisTestsHelper;
import org.elasticsearch.index.analysis.TokenFilterFactory;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.ESTokenStreamTestCase;

import java.io.IOException;

import static org.hamcrest.Matchers.instanceOf;

public class RemoveDuplicatesFilterFactoryTests extends ESTokenStreamTestCase {

    public void testRemoveDuplicatesFilter() throws IOException {
        Settings settings = Settings.builder()
                .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
                .put("index.analysis.filter.removedups.type", "remove_duplicates")
                .build();
        ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
        TokenFilterFactory tokenFilter = analysis.tokenFilter.get("removedups");
        assertThat(tokenFilter, instanceOf(RemoveDuplicatesTokenFilterFactory.class));

        CannedTokenStream cts = new CannedTokenStream(
                new Token("a", 1, 0, 1),
                new Token("b", 1, 2, 3),
                new Token("c", 0, 2, 3),
                new Token("b", 0, 2, 3),
                new Token("d", 1, 4, 5)
        );

        assertTokenStreamContents(tokenFilter.create(cts), new String[]{
                "a", "b", "c", "d"
        }, new int[]{
                1, 1, 0, 1
        });
    }

}
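A minimal usage sketch (not part of the diff; the filter and analyzer names here are illustrative): the new filter is wired into an index like any other token filter, via the same settings keys the test above exercises. Tokens that share a position and a term are emitted only once.

    Settings indexSettings = Settings.builder()
            .put("index.analysis.filter.dedup.type", "remove_duplicates")
            .put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
            .putList("index.analysis.analyzer.my_analyzer.filter", "lowercase", "dedup")
            .build(); // stacked duplicates such as the repeated "b" in the test collapse to one token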
@@ -592,3 +592,525 @@
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: tisch }

---
"greek":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: greek

    - do:
        indices.analyze:
          body:
            text: Μία
            analyzer: greek
    - length: { tokens: 1 }
    - match: { tokens.0.token: μια }

    - do:
        indices.analyze:
          index: test
          body:
            text: Μία
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: μια }

---
"hindi":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: hindi

    - do:
        indices.analyze:
          body:
            text: हिन्दी
            analyzer: hindi
    - length: { tokens: 1 }
    - match: { tokens.0.token: हिंद }

    - do:
        indices.analyze:
          index: test
          body:
            text: हिन्दी
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: हिंद }

---
"hungarian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: hungarian

    - do:
        indices.analyze:
          body:
            text: babakocsi
            analyzer: hungarian
    - length: { tokens: 1 }
    - match: { tokens.0.token: babakocs }

    - do:
        indices.analyze:
          index: test
          body:
            text: babakocsi
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: babakocs }

---
"indonesian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: indonesian

    - do:
        indices.analyze:
          body:
            text: peledakan
            analyzer: indonesian
    - length: { tokens: 1 }
    - match: { tokens.0.token: ledak }

    - do:
        indices.analyze:
          index: test
          body:
            text: peledakan
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: ledak }

---
"irish":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: irish

    - do:
        indices.analyze:
          body:
            text: siopadóireacht
            analyzer: irish
    - length: { tokens: 1 }
    - match: { tokens.0.token: siopadóir }

    - do:
        indices.analyze:
          index: test
          body:
            text: siopadóireacht
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: siopadóir }

---
"italian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: italian

    - do:
        indices.analyze:
          body:
            text: abbandonata
            analyzer: italian
    - length: { tokens: 1 }
    - match: { tokens.0.token: abbandonat }

    - do:
        indices.analyze:
          index: test
          body:
            text: abbandonata
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: abbandonat }

---
"latvian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: latvian

    - do:
        indices.analyze:
          body:
            text: tirgiem
            analyzer: latvian
    - length: { tokens: 1 }
    - match: { tokens.0.token: tirg }

    - do:
        indices.analyze:
          index: test
          body:
            text: tirgiem
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: tirg }

---
"lithuanian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: lithuanian

    - do:
        indices.analyze:
          body:
            text: vaikų
            analyzer: lithuanian
    - length: { tokens: 1 }
    - match: { tokens.0.token: vaik }

    - do:
        indices.analyze:
          index: test
          body:
            text: vaikų
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: vaik }

---
"norwegian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: norwegian

    - do:
        indices.analyze:
          body:
            text: havnedistriktene
            analyzer: norwegian
    - length: { tokens: 1 }
    - match: { tokens.0.token: havnedistrikt }

    - do:
        indices.analyze:
          index: test
          body:
            text: havnedistriktene
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: havnedistrikt }

---
"persian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: persian

    - do:
        indices.analyze:
          body:
            text: میخورد
            analyzer: persian
    - length: { tokens: 1 }
    - match: { tokens.0.token: خورد }

    - do:
        indices.analyze:
          index: test
          body:
            text: میخورد
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: خورد }

---
"portuguese":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: portuguese

    - do:
        indices.analyze:
          body:
            text: quilométricas
            analyzer: portuguese
    - length: { tokens: 1 }
    - match: { tokens.0.token: quilometric }

    - do:
        indices.analyze:
          index: test
          body:
            text: quilométricas
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: quilometric }

---
"romanian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: romanian

    - do:
        indices.analyze:
          body:
            text: absenţa
            analyzer: romanian
    - length: { tokens: 1 }
    - match: { tokens.0.token: absenţ }

    - do:
        indices.analyze:
          index: test
          body:
            text: absenţa
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: absenţ }

---
"russian":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: russian

    - do:
        indices.analyze:
          body:
            text: Вместе с тем о
            analyzer: russian
    - length: { tokens: 1 }
    - match: { tokens.0.token: вмест }

    - do:
        indices.analyze:
          index: test
          body:
            text: Вместе с тем о
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: вмест }

---
"sorani":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: sorani

    - do:
        indices.analyze:
          body:
            text: پیاوە
            analyzer: sorani
    - length: { tokens: 1 }
    - match: { tokens.0.token: پیاو }

    - do:
        indices.analyze:
          index: test
          body:
            text: پیاوە
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: پیاو }

---
"spanish":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: spanish

    - do:
        indices.analyze:
          body:
            text: chicana
            analyzer: spanish
    - length: { tokens: 1 }
    - match: { tokens.0.token: chican }

    - do:
        indices.analyze:
          index: test
          body:
            text: chicana
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: chican }

---
"swedish":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: swedish

    - do:
        indices.analyze:
          body:
            text: jaktkarlarne
            analyzer: swedish
    - length: { tokens: 1 }
    - match: { tokens.0.token: jaktkarl }

    - do:
        indices.analyze:
          index: test
          body:
            text: jaktkarlarne
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: jaktkarl }

---
"turkish":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: turkish

    - do:
        indices.analyze:
          body:
            text: ağacı
            analyzer: turkish
    - length: { tokens: 1 }
    - match: { tokens.0.token: ağaç }

    - do:
        indices.analyze:
          index: test
          body:
            text: ağacı
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: ağaç }

---
"thai":
    - do:
        indices.create:
          index: test
          body:
            settings:
              analysis:
                analyzer:
                  my_analyzer:
                    type: thai

    - do:
        indices.analyze:
          body:
            text: ๑๒๓๔
            analyzer: thai
    - length: { tokens: 1 }
    - match: { tokens.0.token: "1234" }

    - do:
        indices.analyze:
          index: test
          body:
            text: ๑๒๓๔
            analyzer: my_analyzer
    - length: { tokens: 1 }
    - match: { tokens.0.token: "1234" }

@@ -52,7 +52,7 @@ import static org.elasticsearch.ingest.common.IngestCommonPlugin.GROK_PATTERNS;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestStatus.OK;

public class GrokProcessorGetAction extends Action<GrokProcessorGetAction.Request, GrokProcessorGetAction.Response> {
public class GrokProcessorGetAction extends Action<GrokProcessorGetAction.Response> {

    public static final GrokProcessorGetAction INSTANCE = new GrokProcessorGetAction();
    public static final String NAME = "cluster:admin/ingest/processor/grok/get";
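The same mechanical change repeats in the hunks below: Action drops its request-type parameter and keeps only the response type. A sketch of the resulting shape, assuming the combined Action base class this commit adopts (the class, response type, and constant here are illustrative, not from the diff):

    public class ExampleAction extends Action<ExampleResponse> {
        public static final ExampleAction INSTANCE = new ExampleAction();
        public static final String NAME = "cluster:admin/example";

        private ExampleAction() {
            super(NAME);
        }

        @Override
        public ExampleResponse newResponse() {
            return new ExampleResponse(); // ExampleResponse is hypothetical
        }
    }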
@@ -21,7 +21,7 @@ package org.elasticsearch.script.mustache;

import org.elasticsearch.action.Action;

public class MultiSearchTemplateAction extends Action<MultiSearchTemplateRequest, MultiSearchTemplateResponse> {
public class MultiSearchTemplateAction extends Action<MultiSearchTemplateResponse> {

    public static final MultiSearchTemplateAction INSTANCE = new MultiSearchTemplateAction();
    public static final String NAME = "indices:data/read/msearch/template";

@@ -21,7 +21,7 @@ package org.elasticsearch.script.mustache;

import org.elasticsearch.action.Action;

public class SearchTemplateAction extends Action<SearchTemplateRequest, SearchTemplateResponse> {
public class SearchTemplateAction extends Action<SearchTemplateResponse> {

    public static final SearchTemplateAction INSTANCE = new SearchTemplateAction();
    public static final String NAME = "indices:data/read/search/template";

@@ -62,7 +62,7 @@ import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
import static org.elasticsearch.rest.RestStatus.OK;

public class PainlessExecuteAction extends Action<PainlessExecuteAction.Request, PainlessExecuteAction.Response> {
public class PainlessExecuteAction extends Action<PainlessExecuteAction.Response> {

    static final PainlessExecuteAction INSTANCE = new PainlessExecuteAction();
    private static final String NAME = "cluster:admin/scripts/painless/execute";

@@ -24,7 +24,7 @@ import org.elasticsearch.action.Action;
/**
 * Action for explaining evaluating search ranking results.
 */
public class RankEvalAction extends Action<RankEvalRequest, RankEvalResponse> {
public class RankEvalAction extends Action<RankEvalResponse> {

    public static final RankEvalAction INSTANCE = new RankEvalAction();
    public static final String NAME = "indices:data/read/rank_eval";

@@ -25,7 +25,7 @@ import org.elasticsearch.client.ElasticsearchClient;

public class RankEvalRequestBuilder extends ActionRequestBuilder<RankEvalRequest, RankEvalResponse> {

    public RankEvalRequestBuilder(ElasticsearchClient client, Action<RankEvalRequest, RankEvalResponse> action,
    public RankEvalRequestBuilder(ElasticsearchClient client, Action<RankEvalResponse> action,
            RankEvalRequest request) {
        super(client, action, request);
    }
@@ -20,7 +20,7 @@
package org.elasticsearch.index.reindex;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.support.ActiveShardCount;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.settings.Settings;
@@ -38,7 +38,7 @@ import java.util.Map;

public abstract class AbstractBaseReindexRestHandler<
        Request extends AbstractBulkByScrollRequest<Request>,
        A extends GenericAction<Request, BulkByScrollResponse>
        A extends Action<BulkByScrollResponse>
    > extends BaseRestHandler {

    private final A action;

@@ -19,7 +19,7 @@

package org.elasticsearch.index.reindex;

import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
@@ -38,7 +38,7 @@ import java.util.function.Consumer;
 */
public abstract class AbstractBulkByQueryRestHandler<
        Request extends AbstractBulkByScrollRequest<Request>,
        A extends GenericAction<Request, BulkByScrollResponse>> extends AbstractBaseReindexRestHandler<Request, A> {
        A extends Action<BulkByScrollResponse>> extends AbstractBaseReindexRestHandler<Request, A> {

    protected AbstractBulkByQueryRestHandler(Settings settings, A action) {
        super(settings, action);

@@ -61,7 +61,7 @@ class BulkByScrollParallelizationHelper {
    static <Request extends AbstractBulkByScrollRequest<Request>> void startSlicedAction(
            Request request,
            BulkByScrollTask task,
            Action<Request, BulkByScrollResponse> action,
            Action<BulkByScrollResponse> action,
            ActionListener<BulkByScrollResponse> listener,
            Client client,
            DiscoveryNode node,
@@ -85,7 +85,7 @@ class BulkByScrollParallelizationHelper {
    private static <Request extends AbstractBulkByScrollRequest<Request>> void sliceConditionally(
            Request request,
            BulkByScrollTask task,
            Action<Request, BulkByScrollResponse> action,
            Action<BulkByScrollResponse> action,
            ActionListener<BulkByScrollResponse> listener,
            Client client,
            DiscoveryNode node,
@@ -118,7 +118,7 @@ class BulkByScrollParallelizationHelper {

    private static <Request extends AbstractBulkByScrollRequest<Request>> void sendSubRequests(
            Client client,
            Action<Request, BulkByScrollResponse> action,
            Action<BulkByScrollResponse> action,
            String localNodeId,
            BulkByScrollTask task,
            Request request,
@@ -57,7 +57,7 @@ import static org.elasticsearch.rest.RestRequest.Method.POST;
 */
public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexRequest, ReindexAction> {
    static final ObjectParser<ReindexRequest, Void> PARSER = new ObjectParser<>("reindex");
    private static final Pattern HOST_PATTERN = Pattern.compile("(?<scheme>[^:]+)://(?<host>[^:]+):(?<port>\\d+)");
    private static final Pattern HOST_PATTERN = Pattern.compile("(?<scheme>[^:]+)://(?<host>[^:]+):(?<port>\\d+)(?<pathPrefix>/.*)?");

    static {
        ObjectParser.Parser<ReindexRequest, Void> sourceParser = (parser, request, context) -> {
@@ -139,10 +139,12 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
        String hostInRequest = requireNonNull(extractString(remote, "host"), "[host] must be specified to reindex from a remote cluster");
        Matcher hostMatcher = HOST_PATTERN.matcher(hostInRequest);
        if (false == hostMatcher.matches()) {
            throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port] but was [" + hostInRequest + "]");
            throw new IllegalArgumentException("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? but was ["
                + hostInRequest + "]");
        }
        String scheme = hostMatcher.group("scheme");
        String host = hostMatcher.group("host");
        String pathPrefix = hostMatcher.group("pathPrefix");
        int port = Integer.parseInt(hostMatcher.group("port"));
        Map<String, String> headers = extractStringStringMap(remote, "headers");
        TimeValue socketTimeout = extractTimeValue(remote, "socket_timeout", RemoteInfo.DEFAULT_SOCKET_TIMEOUT);
@@ -151,7 +153,8 @@
            throw new IllegalArgumentException(
                "Unsupported fields in [remote]: [" + Strings.collectionToCommaDelimitedString(remote.keySet()) + "]");
        }
        return new RemoteInfo(scheme, host, port, queryForRemote(source), username, password, headers, socketTimeout, connectTimeout);
        return new RemoteInfo(scheme, host, port, pathPrefix, queryForRemote(source),
            username, password, headers, socketTimeout, connectTimeout);
    }

    /**
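For reference, the widened HOST_PATTERN's behavior on the inputs the tests later in this commit exercise (a sketch, not part of the change):

    Matcher m = HOST_PATTERN.matcher("https://other.example.com:9201/proxy-path/");
    // m.matches()           -> true
    // m.group("scheme")     -> "https"
    // m.group("host")       -> "other.example.com"
    // m.group("port")       -> "9201"
    // m.group("pathPrefix") -> "/proxy-path/"  (null when the host has no trailing path)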
@@ -22,7 +22,7 @@ package org.elasticsearch.index.reindex;
import org.elasticsearch.action.Action;
import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse;

public class RethrottleAction extends Action<RethrottleRequest, ListTasksResponse> {
public class RethrottleAction extends Action<ListTasksResponse> {
    public static final RethrottleAction INSTANCE = new RethrottleAction();
    public static final String NAME = "cluster:admin/reindex/rethrottle";

@@ -29,7 +29,7 @@ import org.elasticsearch.client.ElasticsearchClient;
 */
public class RethrottleRequestBuilder extends TasksRequestBuilder<RethrottleRequest, ListTasksResponse, RethrottleRequestBuilder> {
    public RethrottleRequestBuilder(ElasticsearchClient client,
            Action<RethrottleRequest, ListTasksResponse> action) {
            Action<ListTasksResponse> action) {
        super(client, action, new RethrottleRequest());
    }

@@ -37,6 +37,7 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
@@ -206,34 +207,39 @@ public class TransportReindexAction extends HandledTransportAction<ReindexReques
        for (Map.Entry<String, String> header : remoteInfo.getHeaders().entrySet()) {
            clientHeaders[i++] = new BasicHeader(header.getKey(), header.getValue());
        }
        return RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme()))
            .setDefaultHeaders(clientHeaders)
            .setRequestConfigCallback(c -> {
                c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis()));
                c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis()));
                return c;
            })
            .setHttpClientConfigCallback(c -> {
                // Enable basic auth if it is configured
                if (remoteInfo.getUsername() != null) {
                    UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(),
                        remoteInfo.getPassword());
                    CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
                    credentialsProvider.setCredentials(AuthScope.ANY, creds);
                    c.setDefaultCredentialsProvider(credentialsProvider);
                }
                // Stick the task id in the thread name so we can track down tasks from stack traces
                AtomicInteger threads = new AtomicInteger();
                c.setThreadFactory(r -> {
                    String name = "es-client-" + taskId + "-" + threads.getAndIncrement();
                    Thread t = new Thread(r, name);
                    threadCollector.add(t);
                    return t;
                });
                // Limit ourselves to one reactor thread because for now the search process is single threaded.
                c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build());
                return c;
            }).build();
        final RestClientBuilder builder =
            RestClient.builder(new HttpHost(remoteInfo.getHost(), remoteInfo.getPort(), remoteInfo.getScheme()))
                .setDefaultHeaders(clientHeaders)
                .setRequestConfigCallback(c -> {
                    c.setConnectTimeout(Math.toIntExact(remoteInfo.getConnectTimeout().millis()));
                    c.setSocketTimeout(Math.toIntExact(remoteInfo.getSocketTimeout().millis()));
                    return c;
                })
                .setHttpClientConfigCallback(c -> {
                    // Enable basic auth if it is configured
                    if (remoteInfo.getUsername() != null) {
                        UsernamePasswordCredentials creds = new UsernamePasswordCredentials(remoteInfo.getUsername(),
                            remoteInfo.getPassword());
                        CredentialsProvider credentialsProvider = new BasicCredentialsProvider();
                        credentialsProvider.setCredentials(AuthScope.ANY, creds);
                        c.setDefaultCredentialsProvider(credentialsProvider);
                    }
                    // Stick the task id in the thread name so we can track down tasks from stack traces
                    AtomicInteger threads = new AtomicInteger();
                    c.setThreadFactory(r -> {
                        String name = "es-client-" + taskId + "-" + threads.getAndIncrement();
                        Thread t = new Thread(r, name);
                        threadCollector.add(t);
                        return t;
                    });
                    // Limit ourselves to one reactor thread because for now the search process is single threaded.
                    c.setDefaultIOReactorConfig(IOReactorConfig.custom().setIoThreadCount(1).build());
                    return c;
                });
        if (Strings.hasLength(remoteInfo.getPathPrefix()) && "/".equals(remoteInfo.getPathPrefix()) == false) {
            builder.setPathPrefix(remoteInfo.getPathPrefix());
        }
        return builder.build();
    }

    /**
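A hedged construction sketch mirroring the tests that follow (the literal host and path values are illustrative): a path prefix passed through RemoteInfo reaches the low-level client, so every reindex-from-remote request is sent under the proxy path.

    RemoteInfo remote = new RemoteInfo("https", "localhost", 9200, "/proxy-path",
            new BytesArray("{\"match_all\":{}}"), null, null, emptyMap(),
            RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
    RestClient client = TransportReindexAction.buildRestClient(remote, taskId, threads);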
@@ -745,7 +745,7 @@ public class AsyncBulkByScrollActionTests extends ESTestCase {
        @SuppressWarnings("unchecked")
        protected <Request extends ActionRequest, Response extends ActionResponse,
                RequestBuilder extends ActionRequestBuilder<Request, Response>> void doExecute(
                Action<Request, Response> action, Request request, ActionListener<Response> listener) {
                Action<Response> action, Request request, ActionListener<Response> listener) {
            if (false == expectedHeaders.equals(threadPool().getThreadContext().getHeaders())) {
                listener.onFailure(
                    new RuntimeException("Expected " + expectedHeaders + " but got " + threadPool().getThreadContext().getHeaders()));

@@ -34,20 +34,22 @@ import static org.hamcrest.Matchers.hasSize;

public class ReindexFromRemoteBuildRestClientTests extends RestClientBuilderTestCase {
    public void testBuildRestClient() throws Exception {
        RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, new BytesArray("ignored"), null, null, emptyMap(),
        for(final String path: new String[]{"", null, "/", "path"}) {
            RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, path, new BytesArray("ignored"), null, null, emptyMap(),
                RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
        long taskId = randomLong();
        List<Thread> threads = synchronizedList(new ArrayList<>());
        RestClient client = TransportReindexAction.buildRestClient(remoteInfo, taskId, threads);
        try {
            assertBusy(() -> assertThat(threads, hasSize(2)));
            int i = 0;
            for (Thread thread : threads) {
                assertEquals("es-client-" + taskId + "-" + i, thread.getName());
                i++;
            long taskId = randomLong();
            List<Thread> threads = synchronizedList(new ArrayList<>());
            RestClient client = TransportReindexAction.buildRestClient(remoteInfo, taskId, threads);
            try {
                assertBusy(() -> assertThat(threads, hasSize(2)));
                int i = 0;
                for (Thread thread : threads) {
                    assertEquals("es-client-" + taskId + "-" + i, thread.getName());
                    i++;
                }
            } finally {
                client.close();
            }
        } finally {
            client.close();
        }
    }

@@ -57,7 +59,7 @@ public class ReindexFromRemoteBuildRestClientTests extends RestClientBuilderTest
        for (int i = 0; i < numHeaders; ++i) {
            headers.put("header" + i, Integer.toString(i));
        }
        RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, new BytesArray("ignored"), null, null,
        RemoteInfo remoteInfo = new RemoteInfo("https", "localhost", 9200, null, new BytesArray("ignored"), null, null,
            headers, RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
        long taskId = randomLong();
        List<Thread> threads = synchronizedList(new ArrayList<>());
@@ -49,7 +49,7 @@ public class ReindexFromRemoteWhitelistTests extends ESTestCase {
     * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything.
     */
    private RemoteInfo newRemoteInfo(String host, int port) {
        return new RemoteInfo(randomAlphaOfLength(5), host, port, new BytesArray("test"), null, null, emptyMap(),
        return new RemoteInfo(randomAlphaOfLength(5), host, port, null, new BytesArray("test"), null, null, emptyMap(),
            RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
    }

@@ -63,7 +63,7 @@ public class ReindexFromRemoteWhitelistTests extends ESTestCase {

    public void testWhitelistedByPrefix() {
        checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")),
            new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, new BytesArray("test"), null, null, emptyMap(),
            new RemoteInfo(randomAlphaOfLength(5), "es.example.com", 9200, null, new BytesArray("test"), null, null, emptyMap(),
                RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT));
        checkRemoteWhitelist(buildRemoteWhitelist(singletonList("*.example.com:9200")),
            newRemoteInfo("6e134134a1.us-east-1.aws.example.com", 9200));

@@ -104,8 +104,9 @@ public class ReindexFromRemoteWithAuthTests extends ESSingleNodeTestCase {
     * Build a {@link RemoteInfo}, defaulting values that we don't care about in this test to values that don't hurt anything.
     */
    private RemoteInfo newRemoteInfo(String username, String password, Map<String, String> headers) {
        return new RemoteInfo("http", address.getAddress(), address.getPort(), new BytesArray("{\"match_all\":{}}"), username, password,
            headers, RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
        return new RemoteInfo("http", address.getAddress(), address.getPort(), null,
            new BytesArray("{\"match_all\":{}}"), username, password, headers,
            RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
    }

    public void testReindexFromRemoteWithAuthentication() throws Exception {

@@ -88,10 +88,10 @@ public class ReindexSourceTargetValidationTests extends ESTestCase {

    public void testRemoteInfoSkipsValidation() {
        // The index doesn't have to exist
        succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap(),
        succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, new BytesArray("test"), null, null, emptyMap(),
            RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "does_not_exist", "target");
        // And it doesn't matter if they are the same index. They are considered to be different because the remote one is, well, remote.
        succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, new BytesArray("test"), null, null, emptyMap(),
        succeeds(new RemoteInfo(randomAlphaOfLength(5), "test", 9200, null, new BytesArray("test"), null, null, emptyMap(),
            RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT), "target", "target");
    }

@@ -89,6 +89,7 @@ public class RestReindexActionTests extends ESTestCase {
        assertEquals("http", info.getScheme());
        assertEquals("example.com", info.getHost());
        assertEquals(9200, info.getPort());
        assertNull(info.getPathPrefix());
        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout()); // Didn't set the timeout so we should get the default
        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout()); // Didn't set the timeout so we should get the default

@@ -96,8 +97,30 @@
        assertEquals("https", info.getScheme());
        assertEquals("other.example.com", info.getHost());
        assertEquals(9201, info.getPort());
        assertNull(info.getPathPrefix());
        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout());
        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout());

        info = buildRemoteInfoHostTestCase("https://other.example.com:9201/");
        assertEquals("https", info.getScheme());
        assertEquals("other.example.com", info.getHost());
        assertEquals(9201, info.getPort());
        assertEquals("/", info.getPathPrefix());
        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout());
        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout());

        info = buildRemoteInfoHostTestCase("https://other.example.com:9201/proxy-path/");
        assertEquals("https", info.getScheme());
        assertEquals("other.example.com", info.getHost());
        assertEquals(9201, info.getPort());
        assertEquals("/proxy-path/", info.getPathPrefix());
        assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, info.getSocketTimeout());
        assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, info.getConnectTimeout());

        final IllegalArgumentException exception = expectThrows(IllegalArgumentException.class,
            () -> buildRemoteInfoHostTestCase("https"));
        assertEquals("[host] must be of the form [scheme]://[host]:[port](/[pathPrefix])? but was [https]",
            exception.getMessage());
    }

    public void testReindexFromRemoteRequestParsing() throws IOException {
@@ -124,8 +124,10 @@ public class RetryTests extends ESIntegTestCase {
        assertNotNull(masterNode);

        TransportAddress address = masterNode.getHttp().getAddress().publishAddress();
        RemoteInfo remote = new RemoteInfo("http", address.getAddress(), address.getPort(), new BytesArray("{\"match_all\":{}}"), null,
            null, emptyMap(), RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
        RemoteInfo remote =
            new RemoteInfo("http", address.getAddress(), address.getPort(), null,
                new BytesArray("{\"match_all\":{}}"), null, null, emptyMap(),
                RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
        ReindexRequestBuilder request = new ReindexRequestBuilder(client, ReindexAction.INSTANCE).source("source").destination("dest")
            .setRemoteInfo(remote);
        return request;

@@ -63,8 +63,9 @@ public class RoundTripTests extends ESTestCase {
        }
        TimeValue socketTimeout = parseTimeValue(randomPositiveTimeValue(), "socketTimeout");
        TimeValue connectTimeout = parseTimeValue(randomPositiveTimeValue(), "connectTimeout");
        reindex.setRemoteInfo(new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, query, username, password, headers,
            socketTimeout, connectTimeout));
        reindex.setRemoteInfo(
            new RemoteInfo(randomAlphaOfLength(5), randomAlphaOfLength(5), port, null,
                query, username, password, headers, socketTimeout, connectTimeout));
    }
    ReindexRequest tripped = new ReindexRequest();
    roundTrip(reindex, tripped);

@@ -26,17 +26,21 @@ import org.elasticsearch.test.ESTestCase;
import static java.util.Collections.emptyMap;

public class RemoteInfoTests extends ESTestCase {
    private RemoteInfo newRemoteInfo(String scheme, String username, String password) {
        return new RemoteInfo(scheme, "testhost", 12344, new BytesArray("testquery"), username, password, emptyMap(),
    private RemoteInfo newRemoteInfo(String scheme, String prefixPath, String username, String password) {
        return new RemoteInfo(scheme, "testhost", 12344, prefixPath, new BytesArray("testquery"), username, password, emptyMap(),
            RemoteInfo.DEFAULT_SOCKET_TIMEOUT, RemoteInfo.DEFAULT_CONNECT_TIMEOUT);
    }

    public void testToString() {
        assertEquals("host=testhost port=12344 query=testquery", newRemoteInfo("http", null, null).toString());
        assertEquals("host=testhost port=12344 query=testquery username=testuser", newRemoteInfo("http", "testuser", null).toString());
        assertEquals("host=testhost port=12344 query=testquery",
            newRemoteInfo("http", null, null, null).toString());
        assertEquals("host=testhost port=12344 query=testquery username=testuser",
            newRemoteInfo("http", null, "testuser", null).toString());
        assertEquals("host=testhost port=12344 query=testquery username=testuser password=<<>>",
            newRemoteInfo("http", "testuser", "testpass").toString());
            newRemoteInfo("http", null, "testuser", "testpass").toString());
        assertEquals("scheme=https host=testhost port=12344 query=testquery username=testuser password=<<>>",
            newRemoteInfo("https", "testuser", "testpass").toString());
            newRemoteInfo("https", null, "testuser", "testpass").toString());
        assertEquals("scheme=https host=testhost port=12344 pathPrefix=prxy query=testquery username=testuser password=<<>>",
            newRemoteInfo("https", "prxy", "testuser", "testpass").toString());
    }
}
@@ -87,8 +87,8 @@ public class Netty4Plugin extends Plugin implements NetworkPlugin {

    @Override
    public Map<String, Supplier<HttpServerTransport>> getHttpTransports(Settings settings, ThreadPool threadPool, BigArrays bigArrays,
                                                                        PageCacheRecycler pageCacheRecycler,
                                                                        CircuitBreakerService circuitBreakerService,
                                                                        NamedWriteableRegistry namedWriteableRegistry,
                                                                        NamedXContentRegistry xContentRegistry,
                                                                        NetworkService networkService,
                                                                        HttpServerTransport.Dispatcher dispatcher) {
@@ -0,0 +1,61 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.discovery.ec2;

import com.amazonaws.services.ec2.AmazonEC2;

import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.util.concurrent.AbstractRefCounted;

/**
 * Handles the shutdown of the wrapped {@link AmazonEC2} using reference
 * counting.
 */
public class AmazonEc2Reference extends AbstractRefCounted implements Releasable {

    private final AmazonEC2 client;

    AmazonEc2Reference(AmazonEC2 client) {
        super("AWS_EC2_CLIENT");
        this.client = client;
    }

    /**
     * Call when the client is not needed anymore.
     */
    @Override
    public void close() {
        decRef();
    }

    /**
     * Returns the underlying `AmazonEC2` client. All method calls are permitted BUT
     * NOT shutdown. Shutdown is called when reference count reaches 0.
     */
    public AmazonEC2 client() {
        return client;
    }

    @Override
    protected void closeInternal() {
        client.shutdown();
    }

}
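A usage sketch implied by the javadoc above (the service variable and the describe request are assumptions, not from the diff): the reference is a Releasable, so try-with-resources releases it, and the wrapped client is only shut down once the reference count drops to zero.

    try (AmazonEc2Reference clientReference = awsEc2Service.client()) {
        // safe to use the client while the reference is held
        DescribeInstancesResult result = clientReference.client().describeInstances(describeRequest);
    } // close() decrements the count; shutdown happens on the last release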
@ -19,22 +19,17 @@
|
||||
|
||||
package org.elasticsearch.discovery.ec2;
|
||||
|
||||
import com.amazonaws.ClientConfiguration;
|
||||
import com.amazonaws.Protocol;
|
||||
import com.amazonaws.services.ec2.AmazonEC2;
|
||||
import org.elasticsearch.common.settings.SecureSetting;
|
||||
import org.elasticsearch.common.settings.SecureString;
|
||||
import org.elasticsearch.common.settings.Setting;
|
||||
import org.elasticsearch.common.settings.Setting.Property;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
|
||||
import java.io.Closeable;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.function.Function;
|
||||
|
||||
interface AwsEc2Service {
|
||||
interface AwsEc2Service extends Closeable {
|
||||
Setting<Boolean> AUTO_ATTRIBUTE_SETTING = Setting.boolSetting("cloud.node.auto_attributes", false, Property.NodeScope);
|
||||
|
||||
class HostType {
|
||||
@ -45,36 +40,6 @@ interface AwsEc2Service {
|
||||
public static final String TAG_PREFIX = "tag:";
|
||||
}
|
||||
|
||||
/** The access key (ie login id) for connecting to ec2. */
|
||||
Setting<SecureString> ACCESS_KEY_SETTING = SecureSetting.secureString("discovery.ec2.access_key", null);
|
||||
|
||||
/** The secret key (ie password) for connecting to ec2. */
|
||||
Setting<SecureString> SECRET_KEY_SETTING = SecureSetting.secureString("discovery.ec2.secret_key", null);
|
||||
|
||||
/** An override for the ec2 endpoint to connect to. */
|
||||
Setting<String> ENDPOINT_SETTING = new Setting<>("discovery.ec2.endpoint", "",
|
||||
s -> s.toLowerCase(Locale.ROOT), Property.NodeScope);
|
||||
|
||||
/** The protocol to use to connect to to ec2. */
|
||||
Setting<Protocol> PROTOCOL_SETTING = new Setting<>("discovery.ec2.protocol", "https",
|
||||
s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope);
|
||||
|
||||
/** The host name of a proxy to connect to ec2 through. */
|
||||
Setting<String> PROXY_HOST_SETTING = Setting.simpleString("discovery.ec2.proxy.host", Property.NodeScope);
|
||||
|
||||
/** The port of a proxy to connect to ec2 through. */
|
||||
Setting<Integer> PROXY_PORT_SETTING = Setting.intSetting("discovery.ec2.proxy.port", 80, 0, 1<<16, Property.NodeScope);
|
||||
|
||||
/** The username of a proxy to connect to s3 through. */
|
||||
Setting<SecureString> PROXY_USERNAME_SETTING = SecureSetting.secureString("discovery.ec2.proxy.username", null);
|
||||
|
||||
/** The password of a proxy to connect to s3 through. */
|
||||
Setting<SecureString> PROXY_PASSWORD_SETTING = SecureSetting.secureString("discovery.ec2.proxy.password", null);
|
||||
|
||||
/** The socket timeout for connecting to s3. */
|
||||
Setting<TimeValue> READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout",
|
||||
TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope);
|
||||
|
||||
/**
|
||||
* discovery.ec2.host_type: The type of host type to use to communicate with other instances.
|
||||
* Can be one of private_ip, public_ip, private_dns, public_dns or tag:XXXX where
|
||||
@ -87,26 +52,24 @@ interface AwsEc2Service {
|
||||
* discovery.ec2.any_group: If set to false, will require all security groups to be present for the instance to be used for the
|
||||
* discovery. Defaults to true.
|
||||
*/
|
||||
Setting<Boolean> ANY_GROUP_SETTING =
|
||||
Setting.boolSetting("discovery.ec2.any_group", true, Property.NodeScope);
|
||||
Setting<Boolean> ANY_GROUP_SETTING = Setting.boolSetting("discovery.ec2.any_group", true, Property.NodeScope);
|
||||
/**
|
||||
* discovery.ec2.groups: Either a comma separated list or array based list of (security) groups. Only instances with the provided
|
||||
* security groups will be used in the cluster discovery. (NOTE: You could provide either group NAME or group ID.)
|
||||
*/
|
||||
Setting<List<String>> GROUPS_SETTING =
|
||||
Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(), Property.NodeScope);
|
||||
Setting<List<String>> GROUPS_SETTING = Setting.listSetting("discovery.ec2.groups", new ArrayList<>(), s -> s.toString(),
|
||||
Property.NodeScope);
|
||||
/**
|
||||
* discovery.ec2.availability_zones: Either a comma separated list or array based list of availability zones. Only instances within
|
||||
* the provided availability zones will be used in the cluster discovery.
|
||||
*/
|
||||
Setting<List<String>> AVAILABILITY_ZONES_SETTING =
|
||||
Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(), s -> s.toString(),
|
||||
Property.NodeScope);
|
||||
Setting<List<String>> AVAILABILITY_ZONES_SETTING = Setting.listSetting("discovery.ec2.availability_zones", Collections.emptyList(),
|
||||
s -> s.toString(), Property.NodeScope);
|
||||
/**
|
||||
* discovery.ec2.node_cache_time: How long the list of hosts is cached to prevent further requests to the AWS API. Defaults to 10s.
|
||||
*/
|
||||
Setting<TimeValue> NODE_CACHE_TIME_SETTING =
|
||||
Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10), Property.NodeScope);
|
||||
Setting<TimeValue> NODE_CACHE_TIME_SETTING = Setting.timeSetting("discovery.ec2.node_cache_time", TimeValue.timeValueSeconds(10),
|
||||
Property.NodeScope);
|
||||
|
||||
/**
|
||||
* discovery.ec2.tag.*: The ec2 discovery can filter machines to include in the cluster based on tags (and not just groups).
|
||||
@ -115,7 +78,22 @@ interface AwsEc2Service {
|
||||
* instance to be included.
|
||||
*/
|
||||
Setting.AffixSetting<List<String>> TAG_SETTING = Setting.prefixKeySetting("discovery.ec2.tag.",
|
||||
key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope));
|
||||
key -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope));

/**
* Builds then caches an {@code AmazonEC2} client using the current client
* settings. Returns an {@code AmazonEc2Reference} wrapper which should be
* released as soon as it is not required anymore.
*/
AmazonEc2Reference client();

/**
* Updates the settings for building the client and releases the cached one.
* Future client requests will use the new settings to lazily build the new
* client.
*
* @param clientSettings the new refreshed settings
*/
void refreshAndClearCache(Ec2ClientSettings clientSettings);

AmazonEC2 client();
}
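A caller-side sketch of this contract, assuming the AmazonEc2Reference wrapper introduced by this change (it is ref-counted and releasable, so try-with-resources works, as the tests later in this diff show):

    // Borrow the cached client; the reference holds a ref-count while open.
    try (AmazonEc2Reference reference = awsEc2Service.client()) {
        // Use the underlying SDK client only while the reference is held.
        reference.client().describeInstances(new DescribeInstancesRequest());
    } // close() releases the reference; a superseded client is destroyed on its last release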

@ -19,12 +19,9 @@

package org.elasticsearch.discovery.ec2;

import java.io.Closeable;
import java.io.IOException;
import java.util.Random;
import java.util.concurrent.atomic.AtomicReference;

import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
@ -35,112 +32,117 @@ import com.amazonaws.retry.RetryPolicy;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2Client;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.LazyInitializable;

class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service, Closeable {
class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Service {

public static final String EC2_METADATA_URL = "http://169.254.169.254/latest/meta-data/";

private AmazonEC2Client client;
private final AtomicReference<LazyInitializable<AmazonEc2Reference, ElasticsearchException>> lazyClientReference =
new AtomicReference<>();

AwsEc2ServiceImpl(Settings settings) {
super(settings);
}

@Override
public synchronized AmazonEC2 client() {
if (client != null) {
return client;
private AmazonEC2 buildClient(Ec2ClientSettings clientSettings) {
final AWSCredentialsProvider credentials = buildCredentials(logger, clientSettings);
final ClientConfiguration configuration = buildConfiguration(logger, clientSettings);
final AmazonEC2 client = buildClient(credentials, configuration);
if (Strings.hasText(clientSettings.endpoint)) {
logger.debug("using explicit ec2 endpoint [{}]", clientSettings.endpoint);
client.setEndpoint(clientSettings.endpoint);
}

this.client = new AmazonEC2Client(buildCredentials(logger, settings), buildConfiguration(logger, settings));
String endpoint = findEndpoint(logger, settings);
if (endpoint != null) {
client.setEndpoint(endpoint);
}

return this.client;
return client;
}

protected static AWSCredentialsProvider buildCredentials(Logger logger, Settings settings) {
AWSCredentialsProvider credentials;

try (SecureString key = ACCESS_KEY_SETTING.get(settings);
SecureString secret = SECRET_KEY_SETTING.get(settings)) {
if (key.length() == 0 && secret.length() == 0) {
logger.debug("Using either environment variables, system properties or instance profile credentials");
credentials = new DefaultAWSCredentialsProviderChain();
} else {
logger.debug("Using basic key/secret credentials");
credentials = new StaticCredentialsProvider(new BasicAWSCredentials(key.toString(), secret.toString()));
}
}

return credentials;
// proxy for testing
AmazonEC2 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) {
final AmazonEC2 client = new AmazonEC2Client(credentials, configuration);
return client;
}

protected static ClientConfiguration buildConfiguration(Logger logger, Settings settings) {
ClientConfiguration clientConfiguration = new ClientConfiguration();
// pkg private for tests
static ClientConfiguration buildConfiguration(Logger logger, Ec2ClientSettings clientSettings) {
final ClientConfiguration clientConfiguration = new ClientConfiguration();
// the response metadata cache is only there for diagnostics purposes,
// but can force objects from every response to the old generation.
clientConfiguration.setResponseMetadataCacheSize(0);
clientConfiguration.setProtocol(PROTOCOL_SETTING.get(settings));

if (PROXY_HOST_SETTING.exists(settings)) {
String proxyHost = PROXY_HOST_SETTING.get(settings);
Integer proxyPort = PROXY_PORT_SETTING.get(settings);
try (SecureString proxyUsername = PROXY_USERNAME_SETTING.get(settings);
SecureString proxyPassword = PROXY_PASSWORD_SETTING.get(settings)) {

clientConfiguration
.withProxyHost(proxyHost)
.withProxyPort(proxyPort)
.withProxyUsername(proxyUsername.toString())
.withProxyPassword(proxyPassword.toString());
}
clientConfiguration.setProtocol(clientSettings.protocol);
if (Strings.hasText(clientSettings.proxyHost)) {
// TODO: remove this leniency, these settings should exist together and be validated
clientConfiguration.setProxyHost(clientSettings.proxyHost);
clientConfiguration.setProxyPort(clientSettings.proxyPort);
clientConfiguration.setProxyUsername(clientSettings.proxyUsername);
clientConfiguration.setProxyPassword(clientSettings.proxyPassword);
}

// Increase the number of retries in case of 5xx API responses
final Random rand = Randomness.get();
RetryPolicy retryPolicy = new RetryPolicy(
final RetryPolicy retryPolicy = new RetryPolicy(
RetryPolicy.RetryCondition.NO_RETRY_CONDITION,
new RetryPolicy.BackoffStrategy() {
@Override
public long delayBeforeNextRetry(AmazonWebServiceRequest originalRequest,
AmazonClientException exception,
int retriesAttempted) {
// with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000)
logger.warn("EC2 API request failed, retry again. Reason was:", exception);
return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble()));
}
(originalRequest, exception, retriesAttempted) -> {
// with 10 retries the max delay time is 320s/320000ms (10 * 2^5 * 1 * 1000)
logger.warn("EC2 API request failed, retry again. Reason was:", exception);
return 1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble()));
},
10,
false);
clientConfiguration.setRetryPolicy(retryPolicy);
clientConfiguration.setSocketTimeout((int) READ_TIMEOUT_SETTING.get(settings).millis());

clientConfiguration.setSocketTimeout(clientSettings.readTimeoutMillis);
return clientConfiguration;
}
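The backoff comment deserves a quick sanity check: the delay grows as 10 * 2^(n/2) seconds, scaled by a random factor in [1, 2), so the tenth retry waits at least 10 * 2^5 = 320 seconds, which is exactly the 320s/320000ms ceiling the comment cites. A standalone restatement of the same formula, useful only for eyeballing the curve:

    // Illustrative restatement of the backoff formula from buildConfiguration().
    final java.util.Random rand = new java.util.Random();
    for (int retriesAttempted = 0; retriesAttempted <= 10; retriesAttempted++) {
        final long delayMillis =
            1000L * (long) (10d * Math.pow(2, retriesAttempted / 2.0d) * (1.0d + rand.nextDouble()));
        // n = 0 -> at least 10_000 ms; n = 10 -> at least 320_000 ms (the 320s ceiling)
        System.out.println(retriesAttempted + " -> " + delayMillis + " ms");
    }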

protected static String findEndpoint(Logger logger, Settings settings) {
String endpoint = null;
if (ENDPOINT_SETTING.exists(settings)) {
endpoint = ENDPOINT_SETTING.get(settings);
logger.debug("using explicit ec2 endpoint [{}]", endpoint);
// pkg private for tests
static AWSCredentialsProvider buildCredentials(Logger logger, Ec2ClientSettings clientSettings) {
final BasicAWSCredentials credentials = clientSettings.credentials;
if (credentials == null) {
logger.debug("Using either environment variables, system properties or instance profile credentials");
return new DefaultAWSCredentialsProviderChain();
} else {
logger.debug("Using basic key/secret credentials");
return new StaticCredentialsProvider(credentials);
}
return endpoint;
}

@Override
public void close() throws IOException {
if (client != null) {
client.shutdown();
public AmazonEc2Reference client() {
final LazyInitializable<AmazonEc2Reference, ElasticsearchException> clientReference = this.lazyClientReference.get();
if (clientReference == null) {
throw new IllegalStateException("Missing ec2 client configs");
}
return clientReference.getOrCompute();
}

// Ensure that IdleConnectionReaper is shutdown
/**
* Refreshes the settings for the AmazonEC2 client. The new client will be built
* using these new settings. The old client is usable until released. On release it
* will be destroyed instead of being returned to the cache.
*/
@Override
public void refreshAndClearCache(Ec2ClientSettings clientSettings) {
final LazyInitializable<AmazonEc2Reference, ElasticsearchException> newClient = new LazyInitializable<>(
() -> new AmazonEc2Reference(buildClient(clientSettings)), clientReference -> clientReference.incRef(),
clientReference -> clientReference.decRef());
final LazyInitializable<AmazonEc2Reference, ElasticsearchException> oldClient = this.lazyClientReference.getAndSet(newClient);
if (oldClient != null) {
oldClient.reset();
}
}

@Override
public void close() {
final LazyInitializable<AmazonEc2Reference, ElasticsearchException> clientReference = this.lazyClientReference.getAndSet(null);
if (clientReference != null) {
clientReference.reset();
}
// shutdown IdleConnectionReaper background thread
// it will be restarted on new client usage
IdleConnectionReaper.shutdown();
}

}

@ -20,7 +20,6 @@
package org.elasticsearch.discovery.ec2;

import com.amazonaws.AmazonClientException;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.DescribeInstancesRequest;
import com.amazonaws.services.ec2.model.DescribeInstancesResult;
import com.amazonaws.services.ec2.model.Filter;
@ -59,7 +58,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos

private final TransportService transportService;

private final AmazonEC2 client;
private final AwsEc2Service awsEc2Service;

private final boolean bindAnyGroup;

@ -76,7 +75,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos
AwsEc2UnicastHostsProvider(Settings settings, TransportService transportService, AwsEc2Service awsEc2Service) {
super(settings);
this.transportService = transportService;
this.client = awsEc2Service.client();
this.awsEc2Service = awsEc2Service;

this.hostType = AwsEc2Service.HOST_TYPE_SETTING.get(settings);
this.discoNodes = new DiscoNodesCache(AwsEc2Service.NODE_CACHE_TIME_SETTING.get(settings));
@ -103,31 +102,31 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos

protected List<DiscoveryNode> fetchDynamicNodes() {

List<DiscoveryNode> discoNodes = new ArrayList<>();
final List<DiscoveryNode> discoNodes = new ArrayList<>();

DescribeInstancesResult descInstances;
try {
final DescribeInstancesResult descInstances;
try (AmazonEc2Reference clientReference = awsEc2Service.client()) {
// Query EC2 API based on AZ, instance state, and tag.

// NOTE: we don't filter by security group during the describe instances request for two reasons:
// 1. differences in VPCs require different parameters during query (ID vs Name)
// 2. We want to use two different strategies: (all security groups vs. any security groups)
descInstances = SocketAccess.doPrivileged(() -> client.describeInstances(buildDescribeInstancesRequest()));
} catch (AmazonClientException e) {
descInstances = SocketAccess.doPrivileged(() -> clientReference.client().describeInstances(buildDescribeInstancesRequest()));
} catch (final AmazonClientException e) {
logger.info("Exception while retrieving instance list from AWS API: {}", e.getMessage());
logger.debug("Full exception:", e);
return discoNodes;
}

logger.trace("building dynamic unicast discovery nodes...");
for (Reservation reservation : descInstances.getReservations()) {
for (Instance instance : reservation.getInstances()) {
for (final Reservation reservation : descInstances.getReservations()) {
for (final Instance instance : reservation.getInstances()) {
// let's see if we can filter based on groups
if (!groups.isEmpty()) {
List<GroupIdentifier> instanceSecurityGroups = instance.getSecurityGroups();
List<String> securityGroupNames = new ArrayList<>(instanceSecurityGroups.size());
List<String> securityGroupIds = new ArrayList<>(instanceSecurityGroups.size());
for (GroupIdentifier sg : instanceSecurityGroups) {
final List<GroupIdentifier> instanceSecurityGroups = instance.getSecurityGroups();
final List<String> securityGroupNames = new ArrayList<>(instanceSecurityGroups.size());
final List<String> securityGroupIds = new ArrayList<>(instanceSecurityGroups.size());
for (final GroupIdentifier sg : instanceSecurityGroups) {
securityGroupNames.add(sg.getGroupName());
securityGroupIds.add(sg.getGroupId());
}
@ -162,10 +161,10 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos
address = instance.getPublicIpAddress();
} else if (hostType.startsWith(TAG_PREFIX)) {
// Reading the node host from its metadata
String tagName = hostType.substring(TAG_PREFIX.length());
final String tagName = hostType.substring(TAG_PREFIX.length());
logger.debug("reading hostname from [{}] instance tag", tagName);
List<Tag> tags = instance.getTags();
for (Tag tag : tags) {
final List<Tag> tags = instance.getTags();
for (final Tag tag : tags) {
if (tag.getKey().equals(tagName)) {
address = tag.getValue();
logger.debug("using [{}] as the instance address", address);
@ -177,13 +176,13 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos
if (address != null) {
try {
// we only limit to 1 port per address, makes no sense to ping 100 ports
TransportAddress[] addresses = transportService.addressesFromString(address, 1);
final TransportAddress[] addresses = transportService.addressesFromString(address, 1);
for (int i = 0; i < addresses.length; i++) {
logger.trace("adding {}, address {}, transport_address {}", instance.getInstanceId(), address, addresses[i]);
discoNodes.add(new DiscoveryNode(instance.getInstanceId(), "#cloud-" + instance.getInstanceId() + "-" + i,
addresses[i], emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()));
}
} catch (Exception e) {
} catch (final Exception e) {
final String finalAddress = address;
logger.warn(
(Supplier<?>)
@ -201,12 +200,12 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos
}

private DescribeInstancesRequest buildDescribeInstancesRequest() {
DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest()
final DescribeInstancesRequest describeInstancesRequest = new DescribeInstancesRequest()
.withFilters(
new Filter("instance-state-name").withValues("running", "pending")
);

for (Map.Entry<String, List<String>> tagFilter : tags.entrySet()) {
for (final Map.Entry<String, List<String>> tagFilter : tags.entrySet()) {
// for a given tag key, OR relationship for multiple different values
describeInstancesRequest.withFilters(
new Filter("tag:" + tagFilter.getKey()).withValues(tagFilter.getValue())
@ -238,7 +237,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos

@Override
protected List<DiscoveryNode> refresh() {
List<DiscoveryNode> nodes = fetchDynamicNodes();
final List<DiscoveryNode> nodes = fetchDynamicNodes();
empty = nodes.isEmpty();
return nodes;
}
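The DiscoNodesCache that backs refresh() simply reuses the last fetched list until discovery.ec2.node_cache_time elapses, so repeated pings do not hammer the AWS API. A minimal sketch of that pattern, assuming single-threaded callers (the real implementation builds on Elasticsearch's SingleObjectCache):

    // Minimal time-based cache sketch; names are illustrative.
    class CachedNodes {
        private List<DiscoveryNode> cached;
        private long expiresAtMillis;
        private final long cacheMillis; // e.g. NODE_CACHE_TIME_SETTING, default 10s

        CachedNodes(long cacheMillis) { this.cacheMillis = cacheMillis; }

        List<DiscoveryNode> get(java.util.function.Supplier<List<DiscoveryNode>> refresh) {
            final long now = System.currentTimeMillis();
            if (cached == null || now >= expiresAtMillis) {
                cached = refresh.get(); // at most one AWS API call per cache window
                expiresAtMillis = now + cacheMillis;
            }
            return cached;
        }
    }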

@ -0,0 +1,145 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.discovery.ec2;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.Protocol;
import com.amazonaws.auth.BasicAWSCredentials;

import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.unit.TimeValue;
import java.util.Locale;

/**
* A container for settings used to create an EC2 client.
*/
final class Ec2ClientSettings {

/** The access key (i.e. login id) for connecting to ec2. */
static final Setting<SecureString> ACCESS_KEY_SETTING = SecureSetting.secureString("discovery.ec2.access_key", null);

/** The secret key (i.e. password) for connecting to ec2. */
static final Setting<SecureString> SECRET_KEY_SETTING = SecureSetting.secureString("discovery.ec2.secret_key", null);

/** The host name of a proxy to connect to ec2 through. */
static final Setting<String> PROXY_HOST_SETTING = Setting.simpleString("discovery.ec2.proxy.host", Property.NodeScope);

/** The port of a proxy to connect to ec2 through. */
static final Setting<Integer> PROXY_PORT_SETTING = Setting.intSetting("discovery.ec2.proxy.port", 80, 0, 1 << 16, Property.NodeScope);

/** An override for the ec2 endpoint to connect to. */
static final Setting<String> ENDPOINT_SETTING = new Setting<>("discovery.ec2.endpoint", "", s -> s.toLowerCase(Locale.ROOT),
Property.NodeScope);

/** The protocol to use to connect to ec2. */
static final Setting<Protocol> PROTOCOL_SETTING = new Setting<>("discovery.ec2.protocol", "https",
s -> Protocol.valueOf(s.toUpperCase(Locale.ROOT)), Property.NodeScope);

/** The username of a proxy to connect to ec2 through. */
static final Setting<SecureString> PROXY_USERNAME_SETTING = SecureSetting.secureString("discovery.ec2.proxy.username", null);

/** The password of a proxy to connect to ec2 through. */
static final Setting<SecureString> PROXY_PASSWORD_SETTING = SecureSetting.secureString("discovery.ec2.proxy.password", null);

/** The socket timeout for connecting to ec2. */
static final Setting<TimeValue> READ_TIMEOUT_SETTING = Setting.timeSetting("discovery.ec2.read_timeout",
TimeValue.timeValueMillis(ClientConfiguration.DEFAULT_SOCKET_TIMEOUT), Property.NodeScope);

/** Credentials to authenticate with ec2. */
final BasicAWSCredentials credentials;

/**
* The ec2 endpoint the client should talk to, or empty string to use the
* default.
*/
final String endpoint;

/** The protocol to use to talk to ec2. Defaults to https. */
final Protocol protocol;

/** An optional proxy host that requests to ec2 should be made through. */
final String proxyHost;

/** The port number the proxy host should be connected on. */
final int proxyPort;

// these should be "secure" yet the api for the ec2 client only takes String, so
// storing them as SecureString here won't really help with anything
/** An optional username for the proxy host, for basic authentication. */
final String proxyUsername;

/** An optional password for the proxy host, for basic authentication. */
final String proxyPassword;

/** The read timeout for the ec2 client. */
final int readTimeoutMillis;

protected Ec2ClientSettings(BasicAWSCredentials credentials, String endpoint, Protocol protocol, String proxyHost, int proxyPort,
String proxyUsername, String proxyPassword, int readTimeoutMillis) {
this.credentials = credentials;
this.endpoint = endpoint;
this.protocol = protocol;
this.proxyHost = proxyHost;
this.proxyPort = proxyPort;
this.proxyUsername = proxyUsername;
this.proxyPassword = proxyPassword;
this.readTimeoutMillis = readTimeoutMillis;
}

static BasicAWSCredentials loadCredentials(Settings settings) {
try (SecureString accessKey = ACCESS_KEY_SETTING.get(settings);
SecureString secretKey = SECRET_KEY_SETTING.get(settings);) {
if (accessKey.length() != 0) {
if (secretKey.length() != 0) {
return new BasicAWSCredentials(accessKey.toString(), secretKey.toString());
} else {
throw new IllegalArgumentException("Missing secret key for ec2 client.");
}
} else if (secretKey.length() != 0) {
throw new IllegalArgumentException("Missing access key for ec2 client.");
}
return null;
}
}

// pkg private for tests
/** Parse settings for a single client. */
static Ec2ClientSettings getClientSettings(Settings settings) {
final BasicAWSCredentials credentials = loadCredentials(settings);
try (SecureString proxyUsername = PROXY_USERNAME_SETTING.get(settings);
SecureString proxyPassword = PROXY_PASSWORD_SETTING.get(settings)) {
return new Ec2ClientSettings(
credentials,
ENDPOINT_SETTING.get(settings),
PROTOCOL_SETTING.get(settings),
PROXY_HOST_SETTING.get(settings),
PROXY_PORT_SETTING.get(settings),
proxyUsername.toString(),
proxyPassword.toString(),
(int)READ_TIMEOUT_SETTING.get(settings).millis());
}
}

}
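Note that loadCredentials is strict in both directions: an access key without a secret key fails, a secret key without an access key fails, and leaving both unset falls through to the SDK's default provider chain. A sketch of parsing client settings in a test, mirroring the MockSecureSettings pattern used elsewhere in this change (key values are hypothetical):

    // Sketch: parse Ec2ClientSettings from secure settings, test-style.
    final MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("discovery.ec2.access_key", "my_access_key"); // hypothetical
    secureSettings.setString("discovery.ec2.secret_key", "my_secret_key"); // hypothetical
    final Settings settings = Settings.builder()
        .put("discovery.ec2.endpoint", "ec2.us-east-1.amazonaws.com")
        .setSecureSettings(secureSettings)
        .build();
    final Ec2ClientSettings clientSettings = Ec2ClientSettings.getClientSettings(settings);
    assert clientSettings.credentials != null; // both keys present, so static credentials are used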

@ -21,8 +21,6 @@ package org.elasticsearch.discovery.ec2;

import com.amazonaws.util.json.Jackson;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.core.internal.io.IOUtils;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.SpecialPermission;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.logging.Loggers;
@ -33,10 +31,10 @@ import org.elasticsearch.discovery.zen.UnicastHostsProvider;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.DiscoveryPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ReloadablePlugin;
import org.elasticsearch.transport.TransportService;

import java.io.BufferedReader;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@ -52,7 +50,7 @@ import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Closeable {
public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, ReloadablePlugin {

private static Logger logger = Loggers.getLogger(Ec2DiscoveryPlugin.class);
public static final String EC2 = "ec2";
@ -68,22 +66,27 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
// ClientConfiguration clinit has some classloader problems
// TODO: fix that
Class.forName("com.amazonaws.ClientConfiguration");
} catch (ClassNotFoundException e) {
} catch (final ClassNotFoundException e) {
throw new RuntimeException(e);
}
return null;
});
}

private Settings settings;
// stashed when created in order to properly close
private final SetOnce<AwsEc2ServiceImpl> ec2Service = new SetOnce<>();
private final Settings settings;
// protected for testing
protected final AwsEc2Service ec2Service;

public Ec2DiscoveryPlugin(Settings settings) {
this.settings = settings;
this(settings, new AwsEc2ServiceImpl(settings));
}

protected Ec2DiscoveryPlugin(Settings settings, AwsEc2ServiceImpl ec2Service) {
this.settings = settings;
this.ec2Service = ec2Service;
// eagerly load client settings when secure settings are accessible
reload(settings);
}

@Override
public NetworkService.CustomNameResolver getCustomNameResolver(Settings settings) {
@ -94,25 +97,22 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
@Override
public Map<String, Supplier<UnicastHostsProvider>> getZenHostsProviders(TransportService transportService,
NetworkService networkService) {
return Collections.singletonMap(EC2, () -> {
ec2Service.set(new AwsEc2ServiceImpl(settings));
return new AwsEc2UnicastHostsProvider(settings, transportService, ec2Service.get());
});
return Collections.singletonMap(EC2, () -> new AwsEc2UnicastHostsProvider(settings, transportService, ec2Service));
}

@Override
public List<Setting<?>> getSettings() {
return Arrays.asList(
// Register EC2 discovery settings: discovery.ec2
AwsEc2Service.ACCESS_KEY_SETTING,
AwsEc2Service.SECRET_KEY_SETTING,
AwsEc2Service.ENDPOINT_SETTING,
AwsEc2Service.PROTOCOL_SETTING,
AwsEc2Service.PROXY_HOST_SETTING,
AwsEc2Service.PROXY_PORT_SETTING,
AwsEc2Service.PROXY_USERNAME_SETTING,
AwsEc2Service.PROXY_PASSWORD_SETTING,
AwsEc2Service.READ_TIMEOUT_SETTING,
Ec2ClientSettings.ACCESS_KEY_SETTING,
Ec2ClientSettings.SECRET_KEY_SETTING,
Ec2ClientSettings.ENDPOINT_SETTING,
Ec2ClientSettings.PROTOCOL_SETTING,
Ec2ClientSettings.PROXY_HOST_SETTING,
Ec2ClientSettings.PROXY_PORT_SETTING,
Ec2ClientSettings.PROXY_USERNAME_SETTING,
Ec2ClientSettings.PROXY_PASSWORD_SETTING,
Ec2ClientSettings.READ_TIMEOUT_SETTING,
AwsEc2Service.HOST_TYPE_SETTING,
AwsEc2Service.ANY_GROUP_SETTING,
AwsEc2Service.GROUPS_SETTING,
@ -125,10 +125,10 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close

@Override
public Settings additionalSettings() {
Settings.Builder builder = Settings.builder();
final Settings.Builder builder = Settings.builder();

// Adds a node attribute for the ec2 availability zone
String azMetadataUrl = AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone";
final String azMetadataUrl = AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone";
builder.put(getAvailabilityZoneNodeAttributes(settings, azMetadataUrl));
return builder.build();
}
@ -139,7 +139,7 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
if (AwsEc2Service.AUTO_ATTRIBUTE_SETTING.get(settings) == false) {
return Settings.EMPTY;
}
Settings.Builder attrs = Settings.builder();
final Settings.Builder attrs = Settings.builder();

final URL url;
final URLConnection urlConnection;
@ -148,7 +148,7 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
logger.debug("obtaining ec2 [placement/availability-zone] from ec2 meta-data url {}", url);
urlConnection = SocketAccess.doPrivilegedIOException(url::openConnection);
urlConnection.setConnectTimeout(2000);
} catch (IOException e) {
} catch (final IOException e) {
// should not happen, we know the url is not malformed, and openConnection does not actually hit network
throw new UncheckedIOException(e);
}
@ -156,13 +156,13 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close
try (InputStream in = SocketAccess.doPrivilegedIOException(urlConnection::getInputStream);
BufferedReader urlReader = new BufferedReader(new InputStreamReader(in, StandardCharsets.UTF_8))) {

String metadataResult = urlReader.readLine();
if (metadataResult == null || metadataResult.length() == 0) {
final String metadataResult = urlReader.readLine();
if ((metadataResult == null) || (metadataResult.length() == 0)) {
throw new IllegalStateException("no ec2 metadata returned from " + url);
} else {
attrs.put(Node.NODE_ATTRIBUTES.getKey() + "aws_availability_zone", metadataResult);
}
} catch (IOException e) {
} catch (final IOException e) {
// this is lenient so the plugin does not fail when installed outside of ec2
logger.error("failed to get metadata for [placement/availability-zone]", e);
}
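For context, the availability-zone lookup is just an HTTP GET against the instance metadata service; a standalone sketch of the same probe, using the URL constant from AwsEc2ServiceImpl and the same imports as the surrounding class:

    // Sketch: read the availability zone from the EC2 instance metadata service.
    final URL url = new URL(AwsEc2ServiceImpl.EC2_METADATA_URL + "placement/availability-zone");
    final URLConnection connection = url.openConnection();
    connection.setConnectTimeout(2000);
    try (BufferedReader reader = new BufferedReader(
            new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
        final String zone = reader.readLine(); // e.g. "us-east-1c"
    }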
@ -172,6 +172,13 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, Close

@Override
public void close() throws IOException {
IOUtils.close(ec2Service.get());
ec2Service.close();
}

@Override
public void reload(Settings settings) {
// secure settings should be readable
final Ec2ClientSettings clientSettings = Ec2ClientSettings.getClientSettings(settings);
ec2Service.refreshAndClearCache(clientSettings);
}
}
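Because the constructor now calls reload(settings) eagerly, a client is available as soon as the plugin is constructed, and every later reload swaps it atomically. A sketch of the lifecycle, using the mock plugin added later in this diff (settings1 and settings2 are hypothetical):

    // Sketch of the reload lifecycle.
    try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings1)) {
        // clients handed out now are built from settings1
        plugin.reload(settings2);
        // references already held keep the settings1 client until released;
        // new calls to plugin.ec2Service.client() build a settings2 client
    }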

@ -22,7 +22,9 @@ package org.elasticsearch.discovery.ec2;
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.ResponseMetadata;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.regions.Region;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.AcceptVpcPeeringConnectionRequest;
@ -528,9 +530,12 @@ public class AmazonEC2Mock implements AmazonEC2 {
public static final String PREFIX_PRIVATE_DNS = "mock-ip-";
public static final String SUFFIX_PRIVATE_DNS = ".ec2.internal";

List<Instance> instances = new ArrayList<>();
final List<Instance> instances = new ArrayList<>();
String endpoint;
final AWSCredentialsProvider credentials;
final ClientConfiguration configuration;

public AmazonEC2Mock(int nodes, List<List<Tag>> tagsList) {
public AmazonEC2Mock(int nodes, List<List<Tag>> tagsList, AWSCredentialsProvider credentials, ClientConfiguration configuration) {
if (tagsList != null) {
assert tagsList.size() == nodes;
}
@ -552,7 +557,8 @@ public class AmazonEC2Mock implements AmazonEC2 {

instances.add(instance);
}

this.credentials = credentials;
this.configuration = configuration;
}

@Override
@ -642,7 +648,7 @@ public class AmazonEC2Mock implements AmazonEC2 {

@Override
public void setEndpoint(String endpoint) throws IllegalArgumentException {
throw new UnsupportedOperationException("Not supported in mock");
this.endpoint = endpoint;
}

@Override
@ -2110,7 +2116,6 @@ public class AmazonEC2Mock implements AmazonEC2 {

@Override
public void shutdown() {
throw new UnsupportedOperationException("Not supported in mock");
}

@Override
@ -26,31 +26,31 @@ import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.ec2.AwsEc2Service;
import org.elasticsearch.discovery.ec2.AwsEc2ServiceImpl;
import org.elasticsearch.test.ESTestCase;

import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

public class AwsEc2ServiceImplTests extends ESTestCase {

public void testAWSCredentialsWithSystemProviders() {
AWSCredentialsProvider credentialsProvider = AwsEc2ServiceImpl.buildCredentials(logger, Settings.EMPTY);
final AWSCredentialsProvider credentialsProvider = AwsEc2ServiceImpl.buildCredentials(logger,
Ec2ClientSettings.getClientSettings(Settings.EMPTY));
assertThat(credentialsProvider, instanceOf(DefaultAWSCredentialsProviderChain.class));
}

public void testAWSCredentialsWithElasticsearchAwsSettings() {
MockSecureSettings secureSettings = new MockSecureSettings();
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("discovery.ec2.access_key", "aws_key");
secureSettings.setString("discovery.ec2.secret_key", "aws_secret");
Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
final Settings settings = Settings.builder().setSecureSettings(secureSettings).build();
launchAWSCredentialsWithElasticsearchSettingsTest(settings, "aws_key", "aws_secret");
}

protected void launchAWSCredentialsWithElasticsearchSettingsTest(Settings settings, String expectedKey, String expectedSecret) {
AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, settings).getCredentials();
final AWSCredentials credentials = AwsEc2ServiceImpl.buildCredentials(logger, Ec2ClientSettings.getClientSettings(settings))
.getCredentials();
assertThat(credentials.getAWSAccessKeyId(), is(expectedKey));
assertThat(credentials.getAWSSecretKey(), is(expectedSecret));
}
@ -61,10 +61,10 @@ public class AwsEc2ServiceImplTests extends ESTestCase {
}

public void testAWSConfigurationWithAwsSettings() {
MockSecureSettings secureSettings = new MockSecureSettings();
final MockSecureSettings secureSettings = new MockSecureSettings();
secureSettings.setString("discovery.ec2.proxy.username", "aws_proxy_username");
secureSettings.setString("discovery.ec2.proxy.password", "aws_proxy_password");
Settings settings = Settings.builder()
final Settings settings = Settings.builder()
.put("discovery.ec2.protocol", "http")
.put("discovery.ec2.proxy.host", "aws_proxy_host")
.put("discovery.ec2.proxy.port", 8080)
@ -81,7 +81,8 @@ public class AwsEc2ServiceImplTests extends ESTestCase {
String expectedProxyUsername,
String expectedProxyPassword,
int expectedReadTimeout) {
ClientConfiguration configuration = AwsEc2ServiceImpl.buildConfiguration(logger, settings);
final ClientConfiguration configuration = AwsEc2ServiceImpl.buildConfiguration(logger,
Ec2ClientSettings.getClientSettings(settings));

assertThat(configuration.getResponseMetadataCacheSize(), is(0));
assertThat(configuration.getProtocol(), is(expectedProtocol));
@ -92,16 +93,4 @@ public class AwsEc2ServiceImplTests extends ESTestCase {
assertThat(configuration.getSocketTimeout(), is(expectedReadTimeout));
}

public void testDefaultEndpoint() {
String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, Settings.EMPTY);
assertThat(endpoint, nullValue());
}

public void testSpecificEndpoint() {
Settings settings = Settings.builder()
.put(AwsEc2Service.ENDPOINT_SETTING.getKey(), "ec2.endpoint")
.build();
String endpoint = AwsEc2ServiceImpl.findEndpoint(logger, settings);
assertThat(endpoint, is("ec2.endpoint"));
}
}
@ -19,18 +19,19 @@

package org.elasticsearch.discovery.ec2;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.model.Tag;
import org.elasticsearch.common.component.AbstractLifecycleComponent;

import org.elasticsearch.common.settings.Settings;

import java.util.List;

public class AwsEc2ServiceMock extends AbstractLifecycleComponent implements AwsEc2Service {
public class AwsEc2ServiceMock extends AwsEc2ServiceImpl {

private int nodes;
private List<List<Tag>> tagsList;
private AmazonEC2 client;
private final int nodes;
private final List<List<Tag>> tagsList;

public AwsEc2ServiceMock(Settings settings, int nodes, List<List<Tag>> tagsList) {
super(settings);
@ -39,26 +40,8 @@ public class AwsEc2ServiceMock extends AbstractLifecycleComponent implements Aws
}

@Override
public synchronized AmazonEC2 client() {
if (client == null) {
client = new AmazonEC2Mock(nodes, tagsList);
}

return client;
AmazonEC2 buildClient(AWSCredentialsProvider credentials, ClientConfiguration configuration) {
return new AmazonEC2Mock(nodes, tagsList, credentials, configuration);
}

@Override
protected void doStart() {

}

@Override
protected void doStop() {

}

@Override
protected void doClose() {

}
}

@ -17,14 +17,22 @@
* under the License.
*/

package org.elasticsearch.repositories.azure;
package org.elasticsearch.discovery.ec2;

public class AzureServiceDisableException extends IllegalStateException {
public AzureServiceDisableException(String msg) {
super(msg);
import com.amazonaws.services.ec2.model.Tag;

import org.elasticsearch.common.settings.Settings;

import java.util.List;

public class Ec2DiscoveryPluginMock extends Ec2DiscoveryPlugin {

Ec2DiscoveryPluginMock(Settings settings) {
this(settings, 1, null);
}

public AzureServiceDisableException(String msg, Throwable cause) {
super(msg, cause);
public Ec2DiscoveryPluginMock(Settings settings, int nodes, List<List<Tag>> tagsList) {
super(settings, new AwsEc2ServiceMock(settings, nodes, tagsList));
}

}
@ -19,12 +19,17 @@

package org.elasticsearch.discovery.ec2;

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;

import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue;

import org.elasticsearch.discovery.ec2.AwsEc2Service;
import org.elasticsearch.common.settings.MockSecureSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.ec2.Ec2DiscoveryPlugin;
import org.elasticsearch.node.Node;
@ -33,14 +38,14 @@ import org.elasticsearch.test.ESTestCase;
public class Ec2DiscoveryPluginTests extends ESTestCase {

private Settings getNodeAttributes(Settings settings, String url) {
Settings realSettings = Settings.builder()
final Settings realSettings = Settings.builder()
.put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), true)
.put(settings).build();
return Ec2DiscoveryPlugin.getAvailabilityZoneNodeAttributes(realSettings, url);
}

private void assertNodeAttributes(Settings settings, String url, String expected) {
Settings additional = getNodeAttributes(settings, url);
final Settings additional = getNodeAttributes(settings, url);
if (expected == null) {
assertTrue(additional.isEmpty());
} else {
@ -49,36 +54,106 @@ public class Ec2DiscoveryPluginTests extends ESTestCase {
}

public void testNodeAttributesDisabled() {
Settings settings = Settings.builder()
final Settings settings = Settings.builder()
.put(AwsEc2Service.AUTO_ATTRIBUTE_SETTING.getKey(), false).build();
assertNodeAttributes(settings, "bogus", null);
}

public void testNodeAttributes() throws Exception {
Path zoneUrl = createTempFile();
final Path zoneUrl = createTempFile();
Files.write(zoneUrl, Arrays.asList("us-east-1c"));
assertNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString(), "us-east-1c");
}

public void testNodeAttributesBogusUrl() {
UncheckedIOException e = expectThrows(UncheckedIOException.class, () ->
final UncheckedIOException e = expectThrows(UncheckedIOException.class, () ->
getNodeAttributes(Settings.EMPTY, "bogus")
);
assertNotNull(e.getCause());
String msg = e.getCause().getMessage();
final String msg = e.getCause().getMessage();
assertTrue(msg, msg.contains("no protocol: bogus"));
}

public void testNodeAttributesEmpty() throws Exception {
Path zoneUrl = createTempFile();
IllegalStateException e = expectThrows(IllegalStateException.class, () ->
final Path zoneUrl = createTempFile();
final IllegalStateException e = expectThrows(IllegalStateException.class, () ->
getNodeAttributes(Settings.EMPTY, zoneUrl.toUri().toURL().toString())
);
assertTrue(e.getMessage(), e.getMessage().contains("no ec2 metadata returned"));
}

public void testNodeAttributesErrorLenient() throws Exception {
Path dne = createTempDir().resolve("dne");
final Path dne = createTempDir().resolve("dne");
assertNodeAttributes(Settings.EMPTY, dne.toUri().toURL().toString(), null);
}

public void testDefaultEndpoint() throws IOException {
try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) {
final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint;
assertThat(endpoint, nullValue());
}
}

public void testSpecificEndpoint() throws IOException {
final Settings settings = Settings.builder().put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2.endpoint").build();
try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings)) {
final String endpoint = ((AmazonEC2Mock) plugin.ec2Service.client().client()).endpoint;
assertThat(endpoint, is("ec2.endpoint"));
}
}

public void testClientSettingsReInit() throws IOException {
final MockSecureSettings mockSecure1 = new MockSecureSettings();
mockSecure1.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_1");
mockSecure1.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_1");
mockSecure1.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_1");
mockSecure1.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_1");
final Settings settings1 = Settings.builder()
.put(Ec2ClientSettings.PROXY_HOST_SETTING.getKey(), "proxy_host_1")
.put(Ec2ClientSettings.PROXY_PORT_SETTING.getKey(), 881)
.put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2_endpoint_1")
.setSecureSettings(mockSecure1)
.build();
final MockSecureSettings mockSecure2 = new MockSecureSettings();
mockSecure2.setString(Ec2ClientSettings.ACCESS_KEY_SETTING.getKey(), "ec2_access_2");
mockSecure2.setString(Ec2ClientSettings.SECRET_KEY_SETTING.getKey(), "ec2_secret_2");
mockSecure2.setString(Ec2ClientSettings.PROXY_USERNAME_SETTING.getKey(), "proxy_username_2");
mockSecure2.setString(Ec2ClientSettings.PROXY_PASSWORD_SETTING.getKey(), "proxy_password_2");
final Settings settings2 = Settings.builder()
.put(Ec2ClientSettings.PROXY_HOST_SETTING.getKey(), "proxy_host_2")
.put(Ec2ClientSettings.PROXY_PORT_SETTING.getKey(), 882)
.put(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), "ec2_endpoint_2")
.setSecureSettings(mockSecure2)
.build();
try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(settings1)) {
try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) {
assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881));
assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1"));
// reload secure settings2
plugin.reload(settings2);
// client is not released, it is still using the old settings
assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_1"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(881));
assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_1"));
}
try (AmazonEc2Reference clientReference = plugin.ec2Service.client()) {
assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSAccessKeyId(), is("ec2_access_2"));
assertThat(((AmazonEC2Mock) clientReference.client()).credentials.getCredentials().getAWSSecretKey(), is("ec2_secret_2"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyUsername(), is("proxy_username_2"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPassword(), is("proxy_password_2"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyHost(), is("proxy_host_2"));
assertThat(((AmazonEC2Mock) clientReference.client()).configuration.getProxyPort(), is(882));
assertThat(((AmazonEC2Mock) clientReference.client()).endpoint, is("ec2_endpoint_2"));
}
}
}
}
@ -39,6 +39,7 @@ import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;

import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
@ -91,11 +92,15 @@ public class Ec2DiscoveryTests extends ESTestCase {
}

protected List<DiscoveryNode> buildDynamicNodes(Settings nodeSettings, int nodes, List<List<Tag>> tagsList) {
AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(nodeSettings, nodes, tagsList);
AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, awsEc2Service);
List<DiscoveryNode> discoveryNodes = provider.buildDynamicNodes();
logger.debug("--> nodes found: {}", discoveryNodes);
return discoveryNodes;
try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) {
AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service);
List<DiscoveryNode> discoveryNodes = provider.buildDynamicNodes();
logger.debug("--> nodes found: {}", discoveryNodes);
return discoveryNodes;
} catch (IOException e) {
fail("Unexpected IOException");
return null;
}
}

public void testDefaultSettings() throws InterruptedException {
@ -315,22 +320,23 @@ public class Ec2DiscoveryTests extends ESTestCase {
public void testGetNodeListCached() throws Exception {
Settings.Builder builder = Settings.builder()
.put(AwsEc2Service.NODE_CACHE_TIME_SETTING.getKey(), "500ms");
AwsEc2Service awsEc2Service = new AwsEc2ServiceMock(Settings.EMPTY, 1, null);
DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, awsEc2Service) {
@Override
protected List<DiscoveryNode> fetchDynamicNodes() {
fetchCount++;
return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1);
try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY)) {
DummyEc2HostProvider provider = new DummyEc2HostProvider(builder.build(), transportService, plugin.ec2Service) {
@Override
protected List<DiscoveryNode> fetchDynamicNodes() {
fetchCount++;
return Ec2DiscoveryTests.this.buildDynamicNodes(Settings.EMPTY, 1);
}
};
for (int i=0; i<3; i++) {
provider.buildDynamicNodes();
}
};
for (int i=0; i<3; i++) {
provider.buildDynamicNodes();
assertThat(provider.fetchCount, is(1));
Thread.sleep(1_000L); // wait for cache to expire
for (int i=0; i<3; i++) {
provider.buildDynamicNodes();
}
assertThat(provider.fetchCount, is(2));
}
assertThat(provider.fetchCount, is(1));
Thread.sleep(1_000L); // wait for cache to expire
for (int i=0; i<3; i++) {
provider.buildDynamicNodes();
}
assertThat(provider.fetchCount, is(2));
}
}
@ -30,10 +30,9 @@ test.enabled = false

task exampleFixture(type: org.elasticsearch.gradle.test.AntFixture) {
dependsOn testClasses
env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }"
executable = new File(project.runtimeJavaHome, 'bin/java')
args '-cp', "${ -> project.sourceSets.test.runtimeClasspath.asPath }",
'org.elasticsearch.example.resthandler.ExampleFixture',
baseDir, 'TEST'
args 'org.elasticsearch.example.resthandler.ExampleFixture', baseDir, 'TEST'
}

integTestCluster {
@ -20,46 +20,44 @@
package org.elasticsearch.repositories.azure;

import com.microsoft.azure.storage.LocationMode;

import com.microsoft.azure.storage.StorageException;
import org.elasticsearch.cluster.metadata.RepositoryMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.blobstore.BlobContainer;
import org.elasticsearch.common.blobstore.BlobMetaData;
import org.elasticsearch.common.blobstore.BlobPath;
import org.elasticsearch.common.blobstore.BlobStore;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;

import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.nio.file.FileAlreadyExistsException;
import java.util.Locale;
import java.util.Map;

import static java.util.Collections.emptyMap;

import static org.elasticsearch.repositories.azure.AzureRepository.Repository;

public class AzureBlobStore extends AbstractComponent implements BlobStore {

private final AzureStorageService client;
private final AzureStorageService service;

private final String clientName;
private final LocationMode locMode;
private final String container;
private final LocationMode locationMode;

public AzureBlobStore(RepositoryMetaData metadata, Settings settings,
AzureStorageService client) throws URISyntaxException, StorageException {
public AzureBlobStore(RepositoryMetaData metadata, Settings settings, AzureStorageService service)
throws URISyntaxException, StorageException {
super(settings);
this.client = client;
this.container = Repository.CONTAINER_SETTING.get(metadata.settings());
this.clientName = Repository.CLIENT_NAME.get(metadata.settings());

String modeStr = Repository.LOCATION_MODE_SETTING.get(metadata.settings());
if (Strings.hasLength(modeStr)) {
this.locMode = LocationMode.valueOf(modeStr.toUpperCase(Locale.ROOT));
} else {
this.locMode = LocationMode.PRIMARY_ONLY;
}
this.service = service;
// locationMode is set per repository, not per client
this.locationMode = Repository.LOCATION_MODE_SETTING.get(metadata.settings());
final Map<String, AzureStorageSettings> prevSettings = this.service.refreshAndClearCache(emptyMap());
final Map<String, AzureStorageSettings> newSettings = AzureStorageSettings.overrideLocationMode(prevSettings, this.locationMode);
this.service.refreshAndClearCache(newSettings);
}
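The ordering in this constructor is the interesting part: refreshAndClearCache(emptyMap()) returns the previously registered settings, the repository overrides only the location mode on that snapshot, and then re-registers the result. The per-repository LocationMode therefore rides along without disturbing any other client setting. A condensed sketch of that exchange, with method shapes as shown in this diff:

    // Sketch: swap settings while preserving everything except the location mode.
    final Map<String, AzureStorageSettings> prev = service.refreshAndClearCache(emptyMap());
    final Map<String, AzureStorageSettings> next = AzureStorageSettings.overrideLocationMode(prev, locationMode);
    service.refreshAndClearCache(next); // clients built from here on use the overridden mode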

@Override
@ -71,7 +69,11 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore {
* Gets the configured {@link LocationMode} for the Azure storage requests.
*/
public LocationMode getLocationMode() {
return locMode;
return locationMode;
}

public String getClientName() {
return clientName;
}

@Override
@ -80,12 +82,13 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore {
}

@Override
public void delete(BlobPath path) {
String keyPath = path.buildAsString();
public void delete(BlobPath path) throws IOException {
final String keyPath = path.buildAsString();
try {
this.client.deleteFiles(this.clientName, this.locMode, container, keyPath);
service.deleteFiles(clientName, container, keyPath);
} catch (URISyntaxException | StorageException e) {
logger.warn("can not remove [{}] in container {{}}: {}", keyPath, container, e.getMessage());
logger.warn("cannot access [{}] in container {{}}: {}", keyPath, container, e.getMessage());
throw new IOException(e);
}
}

@ -93,30 +96,29 @@ public class AzureBlobStore extends AbstractComponent implements BlobStore {
public void close() {
}

public boolean doesContainerExist()
{
return this.client.doesContainerExist(this.clientName, this.locMode, container);
public boolean containerExist() throws URISyntaxException, StorageException {
return service.doesContainerExist(clientName, container);
}

public boolean blobExists(String blob) throws URISyntaxException, StorageException {
return this.client.blobExists(this.clientName, this.locMode, container, blob);
return service.blobExists(clientName, container, blob);
}

public void deleteBlob(String blob) throws URISyntaxException, StorageException {
this.client.deleteBlob(this.clientName, this.locMode, container, blob);
service.deleteBlob(clientName, container, blob);
}

public InputStream getInputStream(String blob) throws URISyntaxException, StorageException, IOException {
return this.client.getInputStream(this.clientName, this.locMode, container, blob);
return service.getInputStream(clientName, container, blob);
}

public Map<String, BlobMetaData> listBlobsByPrefix(String keyPath, String prefix)
throws URISyntaxException, StorageException {
return this.client.listBlobsByPrefix(this.clientName, this.locMode, container, keyPath, prefix);
return service.listBlobsByPrefix(clientName, container, keyPath, prefix);
}

public void writeBlob(String blobName, InputStream inputStream, long blobSize) throws URISyntaxException, StorageException,
FileAlreadyExistsException {
this.client.writeBlob(this.clientName, this.locMode, container, blobName, inputStream, blobSize);
service.writeBlob(this.clientName, container, blobName, inputStream, blobSize);
}
}