diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index a0655b4b849..6bbb655a18b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -111,19 +111,18 @@ then `File->New Project From Existing Sources`. Point to the root of the source directory, select `Import project from external model->Gradle`, enable `Use auto-import`. In order to run tests directly from -IDEA 2017.2 and above it is required to disable IDEA run launcher to avoid -finding yourself in "jar hell", which can be achieved by adding the +IDEA 2017.2 and above, it is required to disable the IDEA run launcher in order to avoid +`idea_rt.jar` causing "jar hell". This can be achieved by adding the `-Didea.no.launcher=true` [JVM -option](https://intellij-support.jetbrains.com/hc/en-us/articles/206544869-Configuring-JVM-options-and-platform-properties) -or by adding `idea.no.launcher=true` to the +option](https://intellij-support.jetbrains.com/hc/en-us/articles/206544869-Configuring-JVM-options-and-platform-properties). +Alternatively, `idea.no.launcher=true` can be set in the [`idea.properties`](https://www.jetbrains.com/help/idea/file-idea-properties.html) -file which can be accessed under Help > Edit Custom Properties within IDEA. You -may also need to [remove `ant-javafx.jar` from your +file which can be accessed under Help > Edit Custom Properties (this will require a +restart of IDEA). For IDEA 2017.3 and above, in addition to the JVM option, you will need to go to +`Run->Edit Configurations...` and change the value for the `Shorten command line` setting from +`user-local default: none` to `classpath file`. You may also need to [remove `ant-javafx.jar` from your classpath](https://github.com/elastic/elasticsearch/issues/14348) if that is -reported as a source of jar hell. Additionally, in order to run tests directly -from IDEA 2017.3 and above, go to `Run->Edit Configurations...` and change the -value for the `Shorten command line` setting from `user-local default: none` to -`classpath file`. 
+reported as a source of jar hell. The Elasticsearch codebase makes heavy use of Java `assert`s and the test runner requires that assertions be enabled within the JVM. This diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 2cc1d4849d5..c8767318399 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -21,6 +21,8 @@ package org.elasticsearch.client; import org.apache.http.Header; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; @@ -29,13 +31,13 @@ import java.util.Collections; /** * A wrapper for the {@link RestHighLevelClient} that provides methods for accessing the Indices API. - * + *

* See Indices API on elastic.co */ public final class IndicesClient { private final RestHighLevelClient restHighLevelClient; - public IndicesClient(RestHighLevelClient restHighLevelClient) { + IndicesClient(RestHighLevelClient restHighLevelClient) { this.restHighLevelClient = restHighLevelClient; } @@ -56,8 +58,32 @@ public final class IndicesClient { * See * Delete Index API on elastic.co */ - public void deleteIndexAsync(DeleteIndexRequest deleteIndexRequest, ActionListener listener, Header... headers) { + public void deleteIndexAsync(DeleteIndexRequest deleteIndexRequest, ActionListener listener, + Header... headers) { restHighLevelClient.performRequestAsyncAndParseEntity(deleteIndexRequest, Request::deleteIndex, DeleteIndexResponse::fromXContent, listener, Collections.emptySet(), headers); } + + /** + * Creates an index using the Create Index API + *

+ * See + * Create Index API on elastic.co + */ + public CreateIndexResponse createIndex(CreateIndexRequest createIndexRequest, Header... headers) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(createIndexRequest, Request::createIndex, CreateIndexResponse::fromXContent, + Collections.emptySet(), headers); + } + + /** + * Asynchronously creates an index using the Create Index API + *

+ * See + * Create Index API on elastic.co + */ + public void createIndexAsync(CreateIndexRequest createIndexRequest, ActionListener listener, + Header... headers) { + restHighLevelClient.performRequestAsyncAndParseEntity(createIndexRequest, Request::createIndex, CreateIndexResponse::fromXContent, + listener, Collections.emptySet(), headers); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java index e2a6dcac20b..a3544ddb89b 100755 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/Request.java @@ -29,12 +29,14 @@ import org.apache.http.entity.ByteArrayEntity; import org.apache.http.entity.ContentType; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.ActiveShardCount; @@ -49,6 +51,7 @@ import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import 
org.elasticsearch.common.xcontent.XContentParser; @@ -135,6 +138,19 @@ public final class Request { return new Request(HttpDelete.METHOD_NAME, endpoint, parameters.getParams(), null); } + static Request createIndex(CreateIndexRequest createIndexRequest) throws IOException { + String endpoint = endpoint(createIndexRequest.indices(), Strings.EMPTY_ARRAY, ""); + + Params parameters = Params.builder(); + parameters.withTimeout(createIndexRequest.timeout()); + parameters.withMasterTimeout(createIndexRequest.masterNodeTimeout()); + parameters.withWaitForActiveShards(createIndexRequest.waitForActiveShards()); + parameters.withUpdateAllTypes(createIndexRequest.updateAllTypes()); + + HttpEntity entity = createEntity(createIndexRequest, REQUEST_BODY_CONTENT_TYPE); + return new Request(HttpPut.METHOD_NAME, endpoint, parameters.getParams(), entity); + } + static Request info() { return new Request(HttpGet.METHOD_NAME, "/", Collections.emptyMap(), null); } @@ -381,6 +397,18 @@ public final class Request { return new Request("DELETE", "/_search/scroll", Collections.emptyMap(), entity); } + static Request multiSearch(MultiSearchRequest multiSearchRequest) throws IOException { + Params params = Params.builder(); + params.putParam(RestSearchAction.TYPED_KEYS_PARAM, "true"); + if (multiSearchRequest.maxConcurrentSearchRequests() != MultiSearchRequest.MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT) { + params.putParam("max_concurrent_searches", Integer.toString(multiSearchRequest.maxConcurrentSearchRequests())); + } + XContent xContent = REQUEST_BODY_CONTENT_TYPE.xContent(); + byte[] source = MultiSearchRequest.writeMultiLineFormat(multiSearchRequest, xContent); + HttpEntity entity = new ByteArrayEntity(source, createContentType(xContent.type())); + return new Request("GET", "/_msearch", params.getParams(), entity); + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, 
xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); @@ -520,6 +548,13 @@ public final class Request { return putParam("timeout", timeout); } + Params withUpdateAllTypes(boolean updateAllTypes) { + if (updateAllTypes) { + return putParam("update_all_types", Boolean.TRUE.toString()); + } + return this; + } + Params withVersion(long version) { if (version != Versions.MATCH_ANY) { return putParam("version", Long.toString(version)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 2ebaf2cf342..29ab7f90ff5 100755 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -38,6 +38,8 @@ import org.elasticsearch.action.main.MainRequest; import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; @@ -377,6 +379,28 @@ public class RestHighLevelClient implements Closeable { performRequestAsyncAndParseEntity(searchRequest, Request::search, SearchResponse::fromXContent, listener, emptySet(), headers); } + /** + * Executes a multi search using the msearch API + * + * See Multi search API on + * elastic.co + */ + public final MultiSearchResponse multiSearch(MultiSearchRequest multiSearchRequest, Header... 
headers) throws IOException { + return performRequestAndParseEntity(multiSearchRequest, Request::multiSearch, MultiSearchResponse::fromXContext, + emptySet(), headers); + } + + /** + * Asynchronously executes a multi search using the msearch API + * + * See Multi search API on + * elastic.co + */ + public final void multiSearchAsync(MultiSearchRequest searchRequest, ActionListener listener, Header... headers) { + performRequestAsyncAndParseEntity(searchRequest, Request::multiSearch, MultiSearchResponse::fromXContext, listener, + emptySet(), headers); + } + /** * Executes a search using the Search Scroll API * diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 4045e565288..0d6430b5912 100755 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -20,14 +20,88 @@ package org.elasticsearch.client; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.Map; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; public class IndicesClientIT extends 
ESRestHighLevelClientTestCase { + @SuppressWarnings("unchecked") + public void testCreateIndex() throws IOException { + { + // Create index + String indexName = "plain_index"; + assertFalse(indexExists(indexName)); + + CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); + + CreateIndexResponse createIndexResponse = + execute(createIndexRequest, highLevelClient().indices()::createIndex, highLevelClient().indices()::createIndexAsync); + assertTrue(createIndexResponse.isAcknowledged()); + + assertTrue(indexExists(indexName)); + } + { + // Create index with mappings, aliases and settings + String indexName = "rich_index"; + assertFalse(indexExists(indexName)); + + CreateIndexRequest createIndexRequest = new CreateIndexRequest(indexName); + + Alias alias = new Alias("alias_name"); + alias.filter("{\"term\":{\"year\":2016}}"); + alias.routing("1"); + createIndexRequest.alias(alias); + + Settings.Builder settings = Settings.builder(); + settings.put(SETTING_NUMBER_OF_REPLICAS, 2); + createIndexRequest.settings(settings); + + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + mappingBuilder.startObject().startObject("properties").startObject("field"); + mappingBuilder.field("type", "text"); + mappingBuilder.endObject().endObject().endObject(); + createIndexRequest.mapping("type_name", mappingBuilder); + + CreateIndexResponse createIndexResponse = + execute(createIndexRequest, highLevelClient().indices()::createIndex, highLevelClient().indices()::createIndexAsync); + assertTrue(createIndexResponse.isAcknowledged()); + + Map indexMetaData = getIndexMetadata(indexName); + + Map settingsData = (Map) indexMetaData.get("settings"); + Map indexSettings = (Map) settingsData.get("index"); + assertEquals("2", indexSettings.get("number_of_replicas")); + + Map aliasesData = (Map) indexMetaData.get("aliases"); + Map aliasData = (Map) aliasesData.get("alias_name"); + assertEquals("1", aliasData.get("index_routing")); + Map filter = (Map) 
aliasData.get("filter"); + Map term = (Map) filter.get("term"); + assertEquals(2016, term.get("year")); + + Map mappingsData = (Map) indexMetaData.get("mappings"); + Map typeData = (Map) mappingsData.get("type_name"); + Map properties = (Map) typeData.get("properties"); + Map field = (Map) properties.get("field"); + + assertEquals("text", field.get("type")); + } + } + public void testDeleteIndex() throws IOException { { // Delete index if exists @@ -65,4 +139,18 @@ public class IndicesClientIT extends ESRestHighLevelClientTestCase { return response.getStatusLine().getStatusCode() == 200; } + + @SuppressWarnings("unchecked") + private Map getIndexMetadata(String index) throws IOException { + Response response = client().performRequest("GET", index); + + XContentType entityContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue()); + Map responseEntity = XContentHelper.convertToMap(entityContentType.xContent(), response.getEntity().getContent(), + false); + + Map indexMetaData = (Map) responseEntity.get(index); + assertNotNull(indexMetaData); + + return indexMetaData; + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java index 3be250d513d..182de30fd15 100755 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestTests.java @@ -25,6 +25,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; @@ -32,9 +33,11 @@ import 
org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.ClearScrollRequest; +import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -42,6 +45,7 @@ import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.action.support.replication.ReplicatedWriteRequest; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.update.UpdateRequest; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -56,6 +60,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.rest.action.search.RestSearchAction; +import org.elasticsearch.search.Scroll; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -72,16 +77,21 @@ import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Constructor; import java.lang.reflect.Modifier; +import java.util.ArrayList; import java.util.HashMap; +import java.util.List; import java.util.Locale; import java.util.Map; import java.util.StringJoiner; +import java.util.function.BiConsumer; import 
java.util.function.Consumer; import java.util.function.Function; import java.util.function.Supplier; import static java.util.Collections.singletonMap; +import static org.elasticsearch.client.Request.REQUEST_BODY_CONTENT_TYPE; import static org.elasticsearch.client.Request.enforceSameContentType; +import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; public class RequestTests extends ESTestCase { @@ -245,6 +255,34 @@ public class RequestTests extends ESTestCase { assertEquals(method, request.getMethod()); } + public void testCreateIndex() throws IOException { + CreateIndexRequest createIndexRequest = new CreateIndexRequest(); + + String indexName = "index-" + randomAlphaOfLengthBetween(2, 5); + + createIndexRequest.index(indexName); + + Map expectedParams = new HashMap<>(); + + setRandomTimeout(createIndexRequest::timeout, AcknowledgedRequest.DEFAULT_ACK_TIMEOUT, expectedParams); + setRandomMasterTimeout(createIndexRequest, expectedParams); + setRandomWaitForActiveShards(createIndexRequest::waitForActiveShards, expectedParams); + + if (randomBoolean()) { + boolean updateAllTypes = randomBoolean(); + createIndexRequest.updateAllTypes(updateAllTypes); + if (updateAllTypes) { + expectedParams.put("update_all_types", Boolean.TRUE.toString()); + } + } + + Request request = Request.createIndex(createIndexRequest); + assertEquals("/" + indexName, request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertEquals("PUT", request.getMethod()); + assertToXContentBody(createIndexRequest, request.getEntity()); + } + public void testDeleteIndex() throws IOException { DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(); @@ -399,11 +437,7 @@ public class RequestTests extends ESTestCase { expectedParams.put("refresh", refreshPolicy.getValue()); } } - if (randomBoolean()) { - int waitForActiveShards = 
randomIntBetween(0, 10); - updateRequest.waitForActiveShards(waitForActiveShards); - expectedParams.put("wait_for_active_shards", String.valueOf(waitForActiveShards)); - } + setRandomWaitForActiveShards(updateRequest::waitForActiveShards, expectedParams); if (randomBoolean()) { long version = randomLong(); updateRequest.version(version); @@ -771,6 +805,55 @@ public class RequestTests extends ESTestCase { } } + public void testMultiSearch() throws IOException { + int numberOfSearchRequests = randomIntBetween(0, 32); + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + for (int i = 0; i < numberOfSearchRequests; i++) { + SearchRequest searchRequest = randomSearchRequest(() -> { + // No need to return a very complex SearchSourceBuilder here, that is tested elsewhere + SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); + searchSourceBuilder.from(randomInt(10)); + searchSourceBuilder.size(randomIntBetween(20, 100)); + return searchSourceBuilder; + }); + // scroll is not supported in the current msearch api, so unset it: + searchRequest.scroll((Scroll) null); + // only expand_wildcards, ignore_unavailable and allow_no_indices can be specified from msearch api, so unset other options: + IndicesOptions randomlyGenerated = searchRequest.indicesOptions(); + IndicesOptions msearchDefault = new MultiSearchRequest().indicesOptions(); + searchRequest.indicesOptions(IndicesOptions.fromOptions( + randomlyGenerated.ignoreUnavailable(), randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(), + randomlyGenerated.expandWildcardsClosed(), msearchDefault.allowAliasesToMultipleIndices(), + msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases() + )); + multiSearchRequest.add(searchRequest); + } + + Map expectedParams = new HashMap<>(); + expectedParams.put(RestSearchAction.TYPED_KEYS_PARAM, "true"); + if (randomBoolean()) { + multiSearchRequest.maxConcurrentSearchRequests(randomIntBetween(1, 8)); + 
expectedParams.put("max_concurrent_searches", Integer.toString(multiSearchRequest.maxConcurrentSearchRequests())); + } + + Request request = Request.multiSearch(multiSearchRequest); + assertEquals("/_msearch", request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + + List requests = new ArrayList<>(); + CheckedBiConsumer consumer = (searchRequest, p) -> { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(p); + if (searchSourceBuilder.equals(new SearchSourceBuilder()) == false) { + searchRequest.source(searchSourceBuilder); + } + requests.add(searchRequest); + }; + MultiSearchRequest.readMultiLineFormat(new BytesArray(EntityUtils.toByteArray(request.getEntity())), + REQUEST_BODY_CONTENT_TYPE.xContent(), consumer, null, multiSearchRequest.indicesOptions(), null, null, + null, xContentRegistry(), true); + assertEquals(requests, multiSearchRequest.requests()); + } + public void testSearchScroll() throws IOException { SearchScrollRequest searchScrollRequest = new SearchScrollRequest(); searchScrollRequest.scrollId(randomAlphaOfLengthBetween(5, 10)); @@ -782,7 +865,7 @@ public class RequestTests extends ESTestCase { assertEquals("/_search/scroll", request.getEndpoint()); assertEquals(0, request.getParameters().size()); assertToXContentBody(searchScrollRequest, request.getEntity()); - assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); + assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } public void testClearScroll() throws IOException { @@ -796,11 +879,11 @@ public class RequestTests extends ESTestCase { assertEquals("/_search/scroll", request.getEndpoint()); assertEquals(0, request.getParameters().size()); assertToXContentBody(clearScrollRequest, request.getEntity()); - assertEquals(Request.REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), 
request.getEntity().getContentType().getValue()); + assertEquals(REQUEST_BODY_CONTENT_TYPE.mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue()); } private static void assertToXContentBody(ToXContent expectedBody, HttpEntity actualEntity) throws IOException { - BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, Request.REQUEST_BODY_CONTENT_TYPE, false); + BytesReference expectedBytes = XContentHelper.toXContent(expectedBody, REQUEST_BODY_CONTENT_TYPE, false); assertEquals(XContentType.JSON.mediaTypeWithoutParameters(), actualEntity.getContentType().getValue()); assertEquals(expectedBytes, new BytesArray(EntityUtils.toByteArray(actualEntity))); } @@ -959,6 +1042,14 @@ public class RequestTests extends ESTestCase { } } + private static void setRandomWaitForActiveShards(Consumer setter, Map expectedParams) { + if (randomBoolean()) { + int waitForActiveShards = randomIntBetween(0, 10); + setter.accept(waitForActiveShards); + expectedParams.put("wait_for_active_shards", String.valueOf(waitForActiveShards)); + } + } + private static void setRandomRefreshPolicy(ReplicatedWriteRequest request, Map expectedParams) { if (randomBoolean()) { WriteRequest.RefreshPolicy refreshPolicy = randomFrom(WriteRequest.RefreshPolicy.values()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 289ebf372d8..3e72c7c64b6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -23,20 +23,30 @@ import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; import org.apache.http.nio.entity.NStringEntity; +import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; 
import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.ClearScrollResponse; +import org.elasticsearch.action.search.MultiSearchRequest; +import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.NestedQueryBuilder; +import org.elasticsearch.index.query.ScriptQueryBuilder; +import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.join.aggregations.Children; import org.elasticsearch.join.aggregations.ChildrenAggregationBuilder; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.range.Range; import org.elasticsearch.search.aggregations.bucket.range.RangeAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.terms.Terms; @@ -45,10 +55,12 @@ import org.elasticsearch.search.aggregations.matrix.stats.MatrixStats; import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.SuggestBuilder; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder; +import 
org.hamcrest.Matchers; import org.junit.Before; import java.io.IOException; @@ -64,6 +76,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.nullValue; public class SearchIT extends ESRestHighLevelClientTestCase { @@ -80,10 +93,24 @@ public class SearchIT extends ESRestHighLevelClientTestCase { StringEntity doc5 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON); client().performRequest("PUT", "/index/type/5", Collections.emptyMap(), doc5); client().performRequest("POST", "/index/_refresh"); + + StringEntity doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index1/doc/1", Collections.emptyMap(), doc); + doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index1/doc/2", Collections.emptyMap(), doc); + doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index2/doc/3", Collections.emptyMap(), doc); + doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index2/doc/4", Collections.emptyMap(), doc); + doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index3/doc/5", Collections.emptyMap(), doc); + doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); + client().performRequest("PUT", "/index3/doc/6", Collections.emptyMap(), doc); + client().performRequest("POST", "/index1,index2,index3/_refresh"); } public void testSearchNoQuery() throws IOException { - SearchRequest searchRequest = new SearchRequest(); + SearchRequest searchRequest = new SearchRequest("index"); SearchResponse searchResponse = 
execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); assertSearchHeader(searchResponse); assertNull(searchResponse.getAggregations()); @@ -106,7 +133,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } public void testSearchMatchQuery() throws IOException { - SearchRequest searchRequest = new SearchRequest(); + SearchRequest searchRequest = new SearchRequest("index"); searchRequest.source(new SearchSourceBuilder().query(new MatchQueryBuilder("num", 10))); SearchResponse searchResponse = execute(searchRequest, highLevelClient()::search, highLevelClient()::searchAsync); assertSearchHeader(searchResponse); @@ -164,7 +191,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { assertEquals(RestStatus.BAD_REQUEST, exception.status()); } - SearchRequest searchRequest = new SearchRequest(); + SearchRequest searchRequest = new SearchRequest("index"); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.aggregation(new RangeAggregationBuilder("agg1").field("num") .addRange("first", 0, 30).addRange("second", 31, 200)); @@ -193,7 +220,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } public void testSearchWithTermsAndRangeAgg() throws IOException { - SearchRequest searchRequest = new SearchRequest(); + SearchRequest searchRequest = new SearchRequest("index"); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); TermsAggregationBuilder agg = new TermsAggregationBuilder("agg1", ValueType.STRING).field("type.keyword"); agg.subAggregation(new RangeAggregationBuilder("subagg").field("num") @@ -247,7 +274,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } public void testSearchWithMatrixStats() throws IOException { - SearchRequest searchRequest = new SearchRequest(); + SearchRequest searchRequest = new SearchRequest("index"); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.aggregation(new 
MatrixStatsAggregationBuilder("agg1").fields(Arrays.asList("num", "num2"))); searchSourceBuilder.size(0); @@ -374,7 +401,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } public void testSearchWithSuggest() throws IOException { - SearchRequest searchRequest = new SearchRequest(); + SearchRequest searchRequest = new SearchRequest("index"); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder(); searchSourceBuilder.suggest(new SuggestBuilder().addSuggestion("sugg1", new PhraseSuggestionBuilder("type")) .setGlobalText("type")); @@ -464,6 +491,185 @@ public class SearchIT extends ESRestHighLevelClientTestCase { } } + public void testMultiSearch() throws Exception { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().sort("_id", SortOrder.ASC); + multiSearchRequest.add(searchRequest1); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().sort("_id", SortOrder.ASC); + multiSearchRequest.add(searchRequest2); + SearchRequest searchRequest3 = new SearchRequest("index3"); + searchRequest3.source().sort("_id", SortOrder.ASC); + multiSearchRequest.add(searchRequest3); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); + assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); + + assertThat(multiSearchResponse.getResponses()[0].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[0].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[0].getResponse()); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits(), Matchers.equalTo(2L)); + 
assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("1")); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getAt(1).getId(), Matchers.equalTo("2")); + + assertThat(multiSearchResponse.getResponses()[1].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[1].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[1].getResponse()); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getTotalHits(), Matchers.equalTo(2L)); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("3")); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getAt(1).getId(), Matchers.equalTo("4")); + + assertThat(multiSearchResponse.getResponses()[2].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[2].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[2].getResponse()); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getTotalHits(), Matchers.equalTo(2L)); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("5")); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getAt(1).getId(), Matchers.equalTo("6")); + } + + public void testMultiSearch_withAgg() throws Exception { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().size(0).aggregation(new TermsAggregationBuilder("name", ValueType.STRING).field("field.keyword") + .order(BucketOrder.key(true))); + multiSearchRequest.add(searchRequest1); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().size(0).aggregation(new TermsAggregationBuilder("name", 
ValueType.STRING).field("field.keyword") + .order(BucketOrder.key(true))); + multiSearchRequest.add(searchRequest2); + SearchRequest searchRequest3 = new SearchRequest("index3"); + searchRequest3.source().size(0).aggregation(new TermsAggregationBuilder("name", ValueType.STRING).field("field.keyword") + .order(BucketOrder.key(true))); + multiSearchRequest.add(searchRequest3); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); + assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); + + assertThat(multiSearchResponse.getResponses()[0].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[0].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[0].getResponse()); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits(), Matchers.equalTo(2L)); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getHits().length, Matchers.equalTo(0)); + Terms terms = multiSearchResponse.getResponses()[0].getResponse().getAggregations().get("name"); + assertThat(terms.getBuckets().size(), Matchers.equalTo(2)); + assertThat(terms.getBuckets().get(0).getKeyAsString(), Matchers.equalTo("value1")); + assertThat(terms.getBuckets().get(1).getKeyAsString(), Matchers.equalTo("value2")); + + assertThat(multiSearchResponse.getResponses()[1].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[1].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[1].getResponse()); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getTotalHits(), Matchers.equalTo(2L)); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getHits().length, Matchers.equalTo(0)); 
+ terms = multiSearchResponse.getResponses()[1].getResponse().getAggregations().get("name"); + assertThat(terms.getBuckets().size(), Matchers.equalTo(2)); + assertThat(terms.getBuckets().get(0).getKeyAsString(), Matchers.equalTo("value1")); + assertThat(terms.getBuckets().get(1).getKeyAsString(), Matchers.equalTo("value2")); + + assertThat(multiSearchResponse.getResponses()[2].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[2].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[2].getResponse()); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getTotalHits(), Matchers.equalTo(2L)); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getHits().length, Matchers.equalTo(0)); + terms = multiSearchResponse.getResponses()[2].getResponse().getAggregations().get("name"); + assertThat(terms.getBuckets().size(), Matchers.equalTo(2)); + assertThat(terms.getBuckets().get(0).getKeyAsString(), Matchers.equalTo("value1")); + assertThat(terms.getBuckets().get(1).getKeyAsString(), Matchers.equalTo("value2")); + } + + public void testMultiSearch_withQuery() throws Exception { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().query(new TermsQueryBuilder("field", "value2")); + multiSearchRequest.add(searchRequest1); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().query(new TermsQueryBuilder("field", "value2")); + multiSearchRequest.add(searchRequest2); + SearchRequest searchRequest3 = new SearchRequest("index3"); + searchRequest3.source().query(new TermsQueryBuilder("field", "value2")); + multiSearchRequest.add(searchRequest3); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + 
assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); + assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); + + assertThat(multiSearchResponse.getResponses()[0].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[0].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[0].getResponse()); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits(), Matchers.equalTo(1L)); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("2")); + + assertThat(multiSearchResponse.getResponses()[1].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[1].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[1].getResponse()); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getTotalHits(), Matchers.equalTo(1L)); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("4")); + + assertThat(multiSearchResponse.getResponses()[2].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[2].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[2].getResponse()); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getTotalHits(), Matchers.equalTo(1L)); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("6")); + + searchRequest1.source().highlighter(new HighlightBuilder().field("field")); + searchRequest2.source().highlighter(new HighlightBuilder().field("field")); + searchRequest3.source().highlighter(new HighlightBuilder().field("field")); + multiSearchResponse = execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + 
assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); + assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); + + assertThat(multiSearchResponse.getResponses()[0].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[0].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[0].getResponse()); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits(), Matchers.equalTo(1L)); + assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getAt(0).getHighlightFields() + .get("field").fragments()[0].string(), Matchers.equalTo("value2")); + + assertThat(multiSearchResponse.getResponses()[1].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[1].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[1].getResponse()); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getTotalHits(), Matchers.equalTo(1L)); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("4")); + assertThat(multiSearchResponse.getResponses()[1].getResponse().getHits().getAt(0).getHighlightFields() + .get("field").fragments()[0].string(), Matchers.equalTo("value2")); + + assertThat(multiSearchResponse.getResponses()[2].getFailure(), Matchers.nullValue()); + assertThat(multiSearchResponse.getResponses()[2].isFailure(), Matchers.is(false)); + SearchIT.assertSearchHeader(multiSearchResponse.getResponses()[2].getResponse()); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getTotalHits(), Matchers.equalTo(1L)); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getAt(0).getId(), Matchers.equalTo("6")); + assertThat(multiSearchResponse.getResponses()[2].getResponse().getHits().getAt(0).getHighlightFields() + 
.get("field").fragments()[0].string(), Matchers.equalTo("value2")); + } + + public void testMultiSearch_failure() throws Exception { + MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); + SearchRequest searchRequest1 = new SearchRequest("index1"); + searchRequest1.source().query(new ScriptQueryBuilder(new Script(ScriptType.INLINE, "invalid", "code", Collections.emptyMap()))); + multiSearchRequest.add(searchRequest1); + SearchRequest searchRequest2 = new SearchRequest("index2"); + searchRequest2.source().query(new ScriptQueryBuilder(new Script(ScriptType.INLINE, "invalid", "code", Collections.emptyMap()))); + multiSearchRequest.add(searchRequest2); + + MultiSearchResponse multiSearchResponse = + execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); + assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(2)); + + assertThat(multiSearchResponse.getResponses()[0].isFailure(), Matchers.is(true)); + assertThat(multiSearchResponse.getResponses()[0].getFailure().getMessage(), containsString("search_phase_execution_exception")); + assertThat(multiSearchResponse.getResponses()[0].getResponse(), nullValue()); + + assertThat(multiSearchResponse.getResponses()[1].isFailure(), Matchers.is(true)); + assertThat(multiSearchResponse.getResponses()[1].getFailure().getMessage(), containsString("search_phase_execution_exception")); + assertThat(multiSearchResponse.getResponses()[1].getResponse(), nullValue()); + } + private static void assertSearchHeader(SearchResponse searchResponse) { assertThat(searchResponse.getTook().nanos(), greaterThanOrEqualTo(0L)); assertEquals(0, searchResponse.getFailedShards()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index e866fb92aae..372cc17d137 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -21,13 +21,18 @@ package org.elasticsearch.client.documentation; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; +import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.ESRestHighLevelClientTestCase; -import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -52,8 +57,8 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase RestHighLevelClient client = highLevelClient(); { - Response createIndexResponse = client().performRequest("PUT", "/posts"); - assertEquals(200, createIndexResponse.getStatusLine().getStatusCode()); + CreateIndexResponse createIndexResponse = client.indices().createIndex(new CreateIndexRequest("posts")); + assertTrue(createIndexResponse.isAcknowledged()); } { @@ -61,14 +66,26 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase DeleteIndexRequest request = new 
DeleteIndexRequest("posts"); // <1> // end::delete-index-request + // tag::delete-index-request-timeout + request.timeout(TimeValue.timeValueMinutes(2)); // <1> + request.timeout("2m"); // <2> + // end::delete-index-request-timeout + // tag::delete-index-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::delete-index-request-masterTimeout + // tag::delete-index-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::delete-index-request-indicesOptions + // tag::delete-index-execute DeleteIndexResponse deleteIndexResponse = client.indices().deleteIndex(request); // end::delete-index-execute - assertTrue(deleteIndexResponse.isAcknowledged()); // tag::delete-index-response boolean acknowledged = deleteIndexResponse.isAcknowledged(); // <1> // end::delete-index-response + assertTrue(acknowledged); // tag::delete-index-execute-async client.indices().deleteIndexAsync(request, new ActionListener() { @@ -85,26 +102,11 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::delete-index-execute-async } - { - DeleteIndexRequest request = new DeleteIndexRequest("posts"); - // tag::delete-index-request-timeout - request.timeout(TimeValue.timeValueMinutes(2)); // <1> - request.timeout("2m"); // <2> - // end::delete-index-request-timeout - // tag::delete-index-request-masterTimeout - request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> - request.timeout("1m"); // <2> - // end::delete-index-request-masterTimeout - // tag::delete-index-request-indicesOptions - request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> - // end::delete-index-request-indicesOptions - } - { // tag::delete-index-notfound try { DeleteIndexRequest request = new DeleteIndexRequest("does_not_exist"); - DeleteIndexResponse deleteIndexResponse = client.indices().deleteIndex(request); + 
client.indices().deleteIndex(request); } catch (ElasticsearchException exception) { if (exception.status() == RestStatus.NOT_FOUND) { // <1> @@ -113,4 +115,79 @@ public class IndicesClientDocumentationIT extends ESRestHighLevelClientTestCase // end::delete-index-notfound } } + + public void testCreateIndex() throws IOException { + RestHighLevelClient client = highLevelClient(); + + { + // tag::create-index-request + CreateIndexRequest request = new CreateIndexRequest("twitter"); // <1> + // end::create-index-request + + // tag::create-index-request-settings + request.settings(Settings.builder() // <1> + .put("index.number_of_shards", 3) + .put("index.number_of_replicas", 2) + ); + // end::create-index-request-settings + + // tag::create-index-request-mappings + request.mapping("tweet", // <1> + " {\n" + + " \"tweet\": {\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"text\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }", // <2> + XContentType.JSON); + // end::create-index-request-mappings + + // tag::create-index-request-aliases + request.alias( + new Alias("twitter_alias") // <1> + ); + // end::create-index-request-aliases + + // tag::create-index-request-timeout + request.timeout(TimeValue.timeValueMinutes(2)); // <1> + request.timeout("2m"); // <2> + // end::create-index-request-timeout + // tag::create-index-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::create-index-request-masterTimeout + // tag::create-index-request-waitForActiveShards + request.waitForActiveShards(2); // <1> + request.waitForActiveShards(ActiveShardCount.DEFAULT); // <2> + // end::create-index-request-waitForActiveShards + + // tag::create-index-execute + CreateIndexResponse createIndexResponse = client.indices().createIndex(request); + // end::create-index-execute + + // tag::create-index-response + boolean acknowledged = createIndexResponse.isAcknowledged(); // <1> + boolean 
shardsAcked = createIndexResponse.isShardsAcked(); // <2> + // end::create-index-response + assertTrue(acknowledged); + assertTrue(shardsAcked); + + // tag::create-index-execute-async + client.indices().createIndexAsync(request, new ActionListener() { + @Override + public void onResponse(CreateIndexResponse createIndexResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }); + // end::create-index-execute-async + } + } } diff --git a/core/src/main/java/org/elasticsearch/ElasticsearchException.java b/core/src/main/java/org/elasticsearch/ElasticsearchException.java index f4e807b9ffc..9e4e7b909f7 100644 --- a/core/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/core/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.transport.TcpTransport; import java.io.IOException; @@ -986,7 +987,10 @@ public class ElasticsearchException extends RuntimeException implements ToXConte SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class, org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2), UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException.class, - org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException::new, 148, Version.V_5_2_0); + org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException::new, 148, Version.V_5_2_0), + TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class, + MultiBucketConsumerService.TooManyBucketsException::new, 149, + Version.V_7_0_0_alpha1); final Class exceptionClass; final CheckedFunction 
constructor; diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 372d88c75cd..e234e8828bc 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -131,10 +131,13 @@ public class Version implements Comparable { public static final int V_6_0_1_ID = 6000199; public static final Version V_6_0_1 = new Version(V_6_0_1_ID, org.apache.lucene.util.Version.LUCENE_7_0_1); + public static final int V_6_0_2_ID = 6000299; + public static final Version V_6_0_2 = + new Version(V_6_0_2_ID, org.apache.lucene.util.Version.LUCENE_7_0_1); public static final int V_6_1_0_ID = 6010099; public static final Version V_6_1_0 = new Version(V_6_1_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); public static final int V_6_2_0_ID = 6020099; - public static final Version V_6_2_0 = new Version(V_6_2_0_ID, org.apache.lucene.util.Version.LUCENE_7_1_0); + public static final Version V_6_2_0 = new Version(V_6_2_0_ID, org.apache.lucene.util.Version.LUCENE_7_2_0); public static final int V_7_0_0_alpha1_ID = 7000001; public static final Version V_7_0_0_alpha1 = new Version(V_7_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_2_0); @@ -157,6 +160,8 @@ public class Version implements Comparable { return V_6_1_0; case V_6_2_0_ID: return V_6_2_0; + case V_6_0_2_ID: + return V_6_0_2; case V_6_0_1_ID: return V_6_0_1; case V_6_0_0_ID: diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java index a9e4c777784..dc088f815b1 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/alias/Alias.java @@ -21,10 +21,13 @@ package org.elasticsearch.action.admin.indices.alias; import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.common.Nullable; 
+import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -33,11 +36,17 @@ import org.elasticsearch.index.query.QueryBuilder; import java.io.IOException; import java.util.Map; +import java.util.Objects; /** * Represents an alias, to be associated with an index */ -public class Alias implements Streamable { +public class Alias implements Streamable, ToXContentObject { + + private static final ParseField FILTER = new ParseField("filter"); + private static final ParseField ROUTING = new ParseField("routing"); + private static final ParseField INDEX_ROUTING = new ParseField("index_routing", "indexRouting", "index-routing"); + private static final ParseField SEARCH_ROUTING = new ParseField("search_routing", "searchRouting", "search-routing"); private String name; @@ -196,16 +205,16 @@ public class Alias implements Streamable { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token == XContentParser.Token.START_OBJECT) { - if ("filter".equals(currentFieldName)) { + if (FILTER.match(currentFieldName)) { Map filter = parser.mapOrdered(); alias.filter(filter); } } else if (token == XContentParser.Token.VALUE_STRING) { - if ("routing".equals(currentFieldName)) { + if (ROUTING.match(currentFieldName)) { alias.routing(parser.text()); - } else if ("index_routing".equals(currentFieldName) || "indexRouting".equals(currentFieldName) || "index-routing".equals(currentFieldName)) { + } else if (INDEX_ROUTING.match(currentFieldName)) { 
alias.indexRouting(parser.text()); - } else if ("search_routing".equals(currentFieldName) || "searchRouting".equals(currentFieldName) || "search-routing".equals(currentFieldName)) { + } else if (SEARCH_ROUTING.match(currentFieldName)) { alias.searchRouting(parser.text()); } } @@ -213,6 +222,29 @@ public class Alias implements Streamable { return alias; } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(name); + + if (filter != null) { + builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON); + } + + if (indexRouting != null && indexRouting.equals(searchRouting)) { + builder.field(ROUTING.getPreferredName(), indexRouting); + } else { + if (indexRouting != null) { + builder.field(INDEX_ROUTING.getPreferredName(), indexRouting); + } + if (searchRouting != null) { + builder.field(SEARCH_ROUTING.getPreferredName(), searchRouting); + } + } + + builder.endObject(); + return builder; + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java index 2d320b094b2..f628974834c 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequest.java @@ -30,6 +30,7 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; @@ -37,6 +38,7 @@ import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; @@ -65,7 +67,11 @@ import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; * @see org.elasticsearch.client.Requests#createIndexRequest(String) * @see CreateIndexResponse */ -public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest { +public class CreateIndexRequest extends AcknowledgedRequest implements IndicesRequest, ToXContentObject { + + private static final ParseField MAPPINGS = new ParseField("mappings"); + private static final ParseField SETTINGS = new ParseField("settings"); + private static final ParseField ALIASES = new ParseField("aliases"); private String cause = ""; @@ -376,14 +382,14 @@ public class CreateIndexRequest extends AcknowledgedRequest public CreateIndexRequest source(Map source) { for (Map.Entry entry : source.entrySet()) { String name = entry.getKey(); - if (name.equals("settings")) { + if (SETTINGS.match(name)) { settings((Map) entry.getValue()); - } else if (name.equals("mappings")) { + } else if (MAPPINGS.match(name)) { Map mappings = (Map) entry.getValue(); for (Map.Entry entry1 : mappings.entrySet()) { mapping(entry1.getKey(), (Map) entry1.getValue()); } - } else if (name.equals("aliases")) { + } else if (ALIASES.match(name)) { aliases((Map) entry.getValue()); } else { // maybe custom? 
@@ -520,4 +526,32 @@ public class CreateIndexRequest extends AcknowledgedRequest out.writeBoolean(updateAllTypes); waitForActiveShards.writeTo(out); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + builder.startObject(SETTINGS.getPreferredName()); + settings.toXContent(builder, params); + builder.endObject(); + + builder.startObject(MAPPINGS.getPreferredName()); + for (Map.Entry entry : mappings.entrySet()) { + builder.rawField(entry.getKey(), new BytesArray(entry.getValue()), XContentType.JSON); + } + builder.endObject(); + + builder.startObject(ALIASES.getPreferredName()); + for (Alias alias : aliases) { + alias.toXContent(builder, params); + } + builder.endObject(); + + for (Map.Entry entry : customs.entrySet()) { + builder.field(entry.getKey(), entry.getValue(), params); + } + + builder.endObject(); + return builder; + } } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java index b770c11c6ab..5c07b4024ee 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java @@ -39,20 +39,17 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constru */ public class CreateIndexResponse extends AcknowledgedResponse implements ToXContentObject { - private static final String SHARDS_ACKNOWLEDGED = "shards_acknowledged"; - private static final String INDEX = "index"; - - private static final ParseField SHARDS_ACKNOWLEDGED_PARSER = new ParseField(SHARDS_ACKNOWLEDGED); - private static final ParseField INDEX_PARSER = new ParseField(INDEX); + private static final ParseField SHARDS_ACKNOWLEDGED = new ParseField("shards_acknowledged"); + private static final ParseField INDEX = new 
ParseField("index"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("create_index", true, args -> new CreateIndexResponse((boolean) args[0], (boolean) args[1], (String) args[2])); static { declareAcknowledgedField(PARSER); - PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), SHARDS_ACKNOWLEDGED_PARSER, + PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), SHARDS_ACKNOWLEDGED, ObjectParser.ValueType.BOOLEAN); - PARSER.declareField(constructorArg(), (parser, context) -> parser.text(), INDEX_PARSER, ObjectParser.ValueType.STRING); + PARSER.declareField(constructorArg(), (parser, context) -> parser.text(), INDEX, ObjectParser.ValueType.STRING); } private boolean shardsAcked; @@ -102,8 +99,8 @@ public class CreateIndexResponse extends AcknowledgedResponse implements ToXCont } public void addCustomFields(XContentBuilder builder) throws IOException { - builder.field(SHARDS_ACKNOWLEDGED, isShardsAcked()); - builder.field(INDEX, index()); + builder.field(SHARDS_ACKNOWLEDGED.getPreferredName(), isShardsAcked()); + builder.field(INDEX.getPreferredName(), index()); } @Override diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java index c9cf3257c76..b383c02be74 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/get/TransportGetIndexAction.java @@ -36,9 +36,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import 
java.io.IOException; import java.util.List; /** @@ -46,10 +48,15 @@ import java.util.List; */ public class TransportGetIndexAction extends TransportClusterInfoAction { + private final IndicesService indicesService; + @Inject public TransportGetIndexAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, GetIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, GetIndexRequest::new, indexNameExpressionResolver); + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService) { + super(settings, GetIndexAction.NAME, transportService, clusterService, threadPool, actionFilters, GetIndexRequest::new, + indexNameExpressionResolver); + this.indicesService = indicesService; } @Override @@ -60,7 +67,8 @@ public class TransportGetIndexAction extends TransportClusterInfoAction metadataFieldPredicate = indicesService::isMetaDataField; + Predicate fieldPredicate = metadataFieldPredicate.or(indicesService.getFieldFilter().apply(shardId.getIndexName())); + Collection typeIntersection; if (request.types().length == 0) { typeIntersection = indexService.mapperService().types(); @@ -104,16 +110,15 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc } } - MapBuilder> typeMappings = new MapBuilder<>(); + Map> typeMappings = new HashMap<>(); for (String type : typeIntersection) { DocumentMapper documentMapper = indexService.mapperService().documentMapper(type); - Map fieldMapping = findFieldMappingsByType(documentMapper, request); + Map fieldMapping = findFieldMappingsByType(fieldPredicate, documentMapper, request); if (!fieldMapping.isEmpty()) { typeMappings.put(type, fieldMapping); } } - - return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), 
typeMappings.immutableMap())); + return new GetFieldMappingsResponse(singletonMap(shardId.getIndexName(), Collections.unmodifiableMap(typeMappings))); } @Override @@ -163,47 +168,50 @@ public class TransportGetFieldMappingsIndexAction extends TransportSingleShardAc } }; - private Map findFieldMappingsByType(DocumentMapper documentMapper, GetFieldMappingsIndexRequest request) { - MapBuilder fieldMappings = new MapBuilder<>(); + private static Map findFieldMappingsByType(Predicate fieldPredicate, + DocumentMapper documentMapper, + GetFieldMappingsIndexRequest request) { + Map fieldMappings = new HashMap<>(); final DocumentFieldMappers allFieldMappers = documentMapper.mappers(); for (String field : request.fields()) { if (Regex.isMatchAllPattern(field)) { for (FieldMapper fieldMapper : allFieldMappers) { - addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); + addFieldMapper(fieldPredicate, fieldMapper.fieldType().name(), fieldMapper, fieldMappings, request.includeDefaults()); } } else if (Regex.isSimpleMatchPattern(field)) { for (FieldMapper fieldMapper : allFieldMappers) { if (Regex.simpleMatch(field, fieldMapper.fieldType().name())) { - addFieldMapper(fieldMapper.fieldType().name(), fieldMapper, fieldMappings, - request.includeDefaults()); + addFieldMapper(fieldPredicate, fieldMapper.fieldType().name(), + fieldMapper, fieldMappings, request.includeDefaults()); } } } else { // not a pattern FieldMapper fieldMapper = allFieldMappers.smartNameFieldMapper(field); if (fieldMapper != null) { - addFieldMapper(field, fieldMapper, fieldMappings, request.includeDefaults()); + addFieldMapper(fieldPredicate, field, fieldMapper, fieldMappings, request.includeDefaults()); } else if (request.probablySingleFieldRequest()) { fieldMappings.put(field, FieldMappingMetaData.NULL); } } } - return fieldMappings.immutableMap(); + return Collections.unmodifiableMap(fieldMappings); } - private void addFieldMapper(String field, FieldMapper 
fieldMapper, MapBuilder fieldMappings, boolean includeDefaults) { + private static void addFieldMapper(Predicate fieldPredicate, + String field, FieldMapper fieldMapper, Map fieldMappings, + boolean includeDefaults) { if (fieldMappings.containsKey(field)) { return; } - try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - builder.startObject(); - fieldMapper.toXContent(builder, includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS); - builder.endObject(); - fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), builder.bytes())); - } catch (IOException e) { - throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); + if (fieldPredicate.test(field)) { + try { + BytesReference bytes = XContentHelper.toXContent(fieldMapper, XContentType.JSON, + includeDefaults ? includeDefaultsParams : ToXContent.EMPTY_PARAMS, false); + fieldMappings.put(field, new FieldMappingMetaData(fieldMapper.fieldType().name(), bytes)); + } catch (IOException e) { + throw new ElasticsearchException("failed to serialize XContent of field [" + field + "]", e); + } } } - } diff --git a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java index 3189a5a15c2..8ad2ce5475f 100644 --- a/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java +++ b/core/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetMappingsAction.java @@ -31,15 +31,23 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; +import java.io.IOException; + public class TransportGetMappingsAction extends TransportClusterInfoAction { + private final IndicesService indicesService; + @Inject public TransportGetMappingsAction(Settings settings, TransportService transportService, ClusterService clusterService, - ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, GetMappingsAction.NAME, transportService, clusterService, threadPool, actionFilters, GetMappingsRequest::new, indexNameExpressionResolver); + ThreadPool threadPool, ActionFilters actionFilters, + IndexNameExpressionResolver indexNameExpressionResolver, IndicesService indicesService) { + super(settings, GetMappingsAction.NAME, transportService, clusterService, threadPool, actionFilters, GetMappingsRequest::new, + indexNameExpressionResolver); + this.indicesService = indicesService; } @Override @@ -50,7 +58,8 @@ public class TransportGetMappingsAction extends TransportClusterInfoAction listener) { + protected void doMasterOperation(final GetMappingsRequest request, String[] concreteIndices, final ClusterState state, + final ActionListener listener) { logger.trace("serving getMapping request based on version {}", state.version()); - ImmutableOpenMap> result = state.metaData().findMappings( - concreteIndices, request.types() - ); - listener.onResponse(new GetMappingsResponse(result)); + try { + ImmutableOpenMap> result = + state.metaData().findMappings(concreteIndices, request.types(), indicesService.getFieldFilter()); + listener.onResponse(new GetMappingsResponse(result)); + } catch (IOException e) { + listener.onFailure(e); + } } } diff --git a/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java b/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java index b9e6f56b6d7..b24dc685df6 100644 --- 
a/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java +++ b/core/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesIndexAction.java @@ -40,6 +40,7 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.function.Predicate; public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardAction { @@ -77,12 +78,15 @@ public class TransportFieldCapabilitiesIndexAction extends TransportSingleShardA for (String field : request.fields()) { fieldNames.addAll(mapperService.simpleMatchToIndexNames(field)); } + Predicate fieldPredicate = indicesService.getFieldFilter().apply(shardId.getIndexName()); Map responseMap = new HashMap<>(); for (String field : fieldNames) { MappedFieldType ft = mapperService.fullName(field); if (ft != null) { FieldCapabilities fieldCap = new FieldCapabilities(field, ft.typeName(), ft.isSearchable(), ft.isAggregatable()); - responseMap.put(field, fieldCap); + if (indicesService.isMetaDataField(field) || fieldPredicate.test(field)) { + responseMap.put(field, fieldCap); + } } } return new FieldCapabilitiesIndexResponse(shardId.getIndexName(), responseMap); diff --git a/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index 76f73bde4b6..7772b245658 100644 --- a/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/core/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -23,20 +23,36 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.bytes.BytesReference; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringArrayValue; +import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeStringValue; /** * A multi search API request. */ public class MultiSearchRequest extends ActionRequest implements CompositeIndicesRequest { + public static final int MAX_CONCURRENT_SEARCH_REQUESTS_DEFAULT = 0; + private int maxConcurrentSearchRequests = 0; private List requests = new ArrayList<>(); @@ -131,4 +147,171 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice request.writeTo(out); } } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MultiSearchRequest that = (MultiSearchRequest) o; + return maxConcurrentSearchRequests == that.maxConcurrentSearchRequests && + Objects.equals(requests, that.requests) && + Objects.equals(indicesOptions, that.indicesOptions); + } + + @Override + public int hashCode() { + return Objects.hash(maxConcurrentSearchRequests, requests, indicesOptions); + } + + public static void readMultiLineFormat(BytesReference data, + XContent xContent, + CheckedBiConsumer consumer, + String[] indices, + 
IndicesOptions indicesOptions, + String[] types, + String routing, + String searchType, + NamedXContentRegistry registry, + boolean allowExplicitIndex) throws IOException { + int from = 0; + int length = data.length(); + byte marker = xContent.streamSeparator(); + while (true) { + int nextMarker = findNextMarker(marker, from, data, length); + if (nextMarker == -1) { + break; + } + // support first line with \n + if (nextMarker == 0) { + from = nextMarker + 1; + continue; + } + + SearchRequest searchRequest = new SearchRequest(); + if (indices != null) { + searchRequest.indices(indices); + } + if (indicesOptions != null) { + searchRequest.indicesOptions(indicesOptions); + } + if (types != null && types.length > 0) { + searchRequest.types(types); + } + if (routing != null) { + searchRequest.routing(routing); + } + if (searchType != null) { + searchRequest.searchType(searchType); + } + IndicesOptions defaultOptions = SearchRequest.DEFAULT_INDICES_OPTIONS; + // now parse the action + if (nextMarker - from > 0) { + try (XContentParser parser = xContent.createParser(registry, data.slice(from, nextMarker - from))) { + Map source = parser.map(); + for (Map.Entry entry : source.entrySet()) { + Object value = entry.getValue(); + if ("index".equals(entry.getKey()) || "indices".equals(entry.getKey())) { + if (!allowExplicitIndex) { + throw new IllegalArgumentException("explicit index in multi search is not allowed"); + } + searchRequest.indices(nodeStringArrayValue(value)); + } else if ("type".equals(entry.getKey()) || "types".equals(entry.getKey())) { + searchRequest.types(nodeStringArrayValue(value)); + } else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) { + searchRequest.searchType(nodeStringValue(value, null)); + } else if ("request_cache".equals(entry.getKey()) || "requestCache".equals(entry.getKey())) { + searchRequest.requestCache(nodeBooleanValue(value, entry.getKey())); + } else if ("preference".equals(entry.getKey())) { + 
searchRequest.preference(nodeStringValue(value, null)); + } else if ("routing".equals(entry.getKey())) { + searchRequest.routing(nodeStringValue(value, null)); + } + } + defaultOptions = IndicesOptions.fromMap(source, defaultOptions); + } + } + searchRequest.indicesOptions(defaultOptions); + + // move pointers + from = nextMarker + 1; + // now for the body + nextMarker = findNextMarker(marker, from, data, length); + if (nextMarker == -1) { + break; + } + BytesReference bytes = data.slice(from, nextMarker - from); + try (XContentParser parser = xContent.createParser(registry, bytes)) { + consumer.accept(searchRequest, parser); + } + // move pointers + from = nextMarker + 1; + } + } + + private static int findNextMarker(byte marker, int from, BytesReference data, int length) { + for (int i = from; i < length; i++) { + if (data.get(i) == marker) { + return i; + } + } + if (from != length) { + throw new IllegalArgumentException("The msearch request must be terminated by a newline [\n]"); + } + return -1; + } + + public static byte[] writeMultiLineFormat(MultiSearchRequest multiSearchRequest, XContent xContent) throws IOException { + ByteArrayOutputStream output = new ByteArrayOutputStream(); + for (SearchRequest request : multiSearchRequest.requests()) { + try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { + xContentBuilder.startObject(); + if (request.indices() != null) { + xContentBuilder.field("index", request.indices()); + } + if (request.indicesOptions() != null && request.indicesOptions() != SearchRequest.DEFAULT_INDICES_OPTIONS) { + if (request.indicesOptions().expandWildcardsOpen() && request.indicesOptions().expandWildcardsClosed()) { + xContentBuilder.field("expand_wildcards", "all"); + } else if (request.indicesOptions().expandWildcardsOpen()) { + xContentBuilder.field("expand_wildcards", "open"); + } else if (request.indicesOptions().expandWildcardsClosed()) { + xContentBuilder.field("expand_wildcards", "closed"); + } else { + 
xContentBuilder.field("expand_wildcards", "none"); + } + xContentBuilder.field("ignore_unavailable", request.indicesOptions().ignoreUnavailable()); + xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices()); + } + if (request.types() != null) { + xContentBuilder.field("types", request.types()); + } + if (request.searchType() != null) { + xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT)); + } + if (request.requestCache() != null) { + xContentBuilder.field("request_cache", request.requestCache()); + } + if (request.preference() != null) { + xContentBuilder.field("preference", request.preference()); + } + if (request.routing() != null) { + xContentBuilder.field("routing", request.routing()); + } + xContentBuilder.endObject(); + xContentBuilder.bytes().writeTo(output); + } + output.write(xContent.streamSeparator()); + try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { + if (request.source() != null) { + request.source().toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); + } else { + xContentBuilder.startObject(); + xContentBuilder.endObject(); + } + xContentBuilder.bytes().writeTo(output); + } + output.write(xContent.streamSeparator()); + } + return output.toByteArray(); + } + } diff --git a/core/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java b/core/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java index 560379a6ce2..cb30385ecc8 100644 --- a/core/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java +++ b/core/src/main/java/org/elasticsearch/action/search/MultiSearchResponse.java @@ -24,23 +24,39 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; import java.util.Arrays; import java.util.Iterator; +import java.util.List; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; /** * A multi search response. */ public class MultiSearchResponse extends ActionResponse implements Iterable, ToXContentObject { + private static final ParseField RESPONSES = new ParseField(Fields.RESPONSES); + private static final ParseField TOOK_IN_MILLIS = new ParseField("took"); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("multi_search", + true, a -> new MultiSearchResponse(((List)a[0]).toArray(new Item[0]), (long) a[1])); + static { + PARSER.declareObjectArray(constructorArg(), (p, c) -> itemFromXContent(p), RESPONSES); + PARSER.declareLong(constructorArg(), TOOK_IN_MILLIS); + } + /** * A search response item, holding the actual search response, or an error message if it failed. */ @@ -188,6 +204,45 @@ public class MultiSearchResponse extends ActionResponse implements Iterable reduceContextFunction; - public SearchPhaseController(Settings settings, BigArrays bigArrays, ScriptService scriptService) { + /** + * Constructor. 
+ * @param settings Node settings + * @param reduceContextFunction A function that builds a context for the reduce of an {@link InternalAggregation} + */ + public SearchPhaseController(Settings settings, Function reduceContextFunction) { super(settings); - this.bigArrays = bigArrays; - this.scriptService = scriptService; + this.reduceContextFunction = reduceContextFunction; } public AggregatedDfs aggregateDfs(Collection results) { @@ -496,7 +500,7 @@ public final class SearchPhaseController extends AbstractComponent { } } final Suggest suggest = groupedSuggestions.isEmpty() ? null : new Suggest(Suggest.reduce(groupedSuggestions)); - ReduceContext reduceContext = new ReduceContext(bigArrays, scriptService, true); + ReduceContext reduceContext = reduceContextFunction.apply(true); final InternalAggregations aggregations = aggregationsList.isEmpty() ? null : reduceAggs(aggregationsList, firstResult.pipelineAggregators(), reduceContext); final SearchProfileShardResults shardResults = profileResults.isEmpty() ? null : new SearchProfileShardResults(profileResults); @@ -513,7 +517,7 @@ public final class SearchPhaseController extends AbstractComponent { * that relevant for the final reduce step. For final reduce see {@link #reduceAggs(List, List, ReduceContext)} */ private InternalAggregations reduceAggsIncrementally(List aggregationsList) { - ReduceContext reduceContext = new ReduceContext(bigArrays, scriptService, false); + ReduceContext reduceContext = reduceContextFunction.apply(false); return aggregationsList.isEmpty() ? 
null : reduceAggs(aggregationsList, null, reduceContext); } diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 02d2e6a3429..a8bbd698918 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.search.SearchHits; @@ -242,9 +243,14 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb } public static SearchResponse fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); - XContentParser.Token token; - String currentFieldName = null; + ensureExpectedToken(Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + parser.nextToken(); + return innerFromXContent(parser); + } + + static SearchResponse innerFromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + String currentFieldName = parser.currentName(); SearchHits hits = null; Aggregations aggs = null; Suggest suggest = null; @@ -259,8 +265,8 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb String scrollId = null; List failures = new ArrayList<>(); Clusters clusters = Clusters.EMPTY; - while((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { + for 
(Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) { + if (token == Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if (SCROLL_ID.match(currentFieldName)) { @@ -276,7 +282,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb } else { parser.skipChildren(); } - } else if (token == XContentParser.Token.START_OBJECT) { + } else if (token == Token.START_OBJECT) { if (SearchHits.Fields.HITS.equals(currentFieldName)) { hits = SearchHits.fromXContent(parser); } else if (Aggregations.AGGREGATIONS_FIELD.equals(currentFieldName)) { @@ -286,8 +292,8 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb } else if (SearchProfileShardResults.PROFILE_FIELD.equals(currentFieldName)) { profile = SearchProfileShardResults.fromXContent(parser); } else if (RestActions._SHARDS_FIELD.match(currentFieldName)) { - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { + while ((token = parser.nextToken()) != Token.END_OBJECT) { + if (token == Token.FIELD_NAME) { currentFieldName = parser.currentName(); } else if (token.isValue()) { if (RestActions.FAILED_FIELD.match(currentFieldName)) { @@ -301,9 +307,9 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb } else { parser.skipChildren(); } - } else if (token == XContentParser.Token.START_ARRAY) { + } else if (token == Token.START_ARRAY) { if (RestActions.FAILURES_FIELD.match(currentFieldName)) { - while((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + while((token = parser.nextToken()) != Token.END_ARRAY) { failures.add(ShardSearchFailure.fromXContent(parser)); } } else { diff --git a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index 9dec3be5c1b..371314b990c 
100644 --- a/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -76,7 +76,7 @@ public class TransportMultiSearchAction extends HandledTransportAction void declareAcknowledgedField(ConstructingObjectParser PARSER) { - PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), ACKNOWLEDGED_PARSER, + PARSER.declareField(constructorArg(), (parser, context) -> parser.booleanValue(), ACKNOWLEDGED, ObjectParser.ValueType.BOOLEAN); } @@ -78,6 +77,6 @@ public abstract class AcknowledgedResponse extends ActionResponse { } protected void addAcknowledgedField(XContentBuilder builder) throws IOException { - builder.field(ACKNOWLEDGED, isAcknowledged()); + builder.field(ACKNOWLEDGED.getPreferredName(), isAcknowledged()); } } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java index 66f5a49f6d6..74233b5cec7 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaData.java @@ -93,6 +93,9 @@ public class IndexTemplateMetaData extends AbstractDiffable mappings, ImmutableOpenMap aliases, ImmutableOpenMap customs) { + if (patterns == null || patterns.isEmpty()) { + throw new IllegalArgumentException("Index patterns must not be null or empty; got " + patterns); + } this.name = name; this.order = order; this.version = version; @@ -244,7 +247,7 @@ public class IndexTemplateMetaData extends AbstractDiffable 0 ? 
patterns.get(0) : ""); + out.writeString(patterns.get(0)); } Settings.writeSettingsToStream(settings, out); out.writeVInt(mappings.size()); diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java index 83a06d9c4ca..9cbfb2ec71f 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MappingMetaData.java @@ -107,15 +107,6 @@ public class MappingMetaData extends AbstractDiffable { initMappers(withoutType); } - private MappingMetaData() { - this.type = ""; - try { - this.source = new CompressedXContent("{}"); - } catch (IOException ex) { - throw new IllegalStateException("Cannot create MappingMetaData prototype", ex); - } - } - private void initMappers(Map withoutType) { if (withoutType.containsKey("_routing")) { boolean required = false; @@ -143,13 +134,6 @@ public class MappingMetaData extends AbstractDiffable { } } - public MappingMetaData(String type, CompressedXContent source, Routing routing, boolean hasParentField) { - this.type = type; - this.source = source; - this.routing = routing; - this.hasParentField = hasParentField; - } - void updateDefaultMapping(MappingMetaData defaultMapping) { if (routing == Routing.EMPTY) { routing = defaultMapping.routing(); @@ -250,5 +234,4 @@ public class MappingMetaData extends AbstractDiffable { public static Diff readDiffFrom(StreamInput in) throws IOException { return readDiffFrom(MappingMetaData::new, in); } - } diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java index c582f372e51..0e9bcf8f11a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java @@ -48,11 +48,13 @@ import org.elasticsearch.common.xcontent.ToXContent; import 
org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.gateway.MetaDataStateFormat; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -69,6 +71,8 @@ import java.util.Map; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; +import java.util.function.Function; +import java.util.function.Predicate; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; @@ -324,32 +328,38 @@ public class MetaData implements Iterable, Diffable, To return false; } - /* - * Finds all mappings for types and concrete indices. Types are expanded to - * include all types that match the glob patterns in the types array. Empty - * types array, null or {"_all"} will be expanded to all types available for - * the given indices. + /** + * Finds all mappings for types and concrete indices. Types are expanded to include all types that match the glob + * patterns in the types array. Empty types array, null or {"_all"} will be expanded to all types available for + * the given indices. 
Only fields that match the provided field filter will be returned (default is a predicate + * that always returns true, which can be overridden via plugins) + * + * @see MapperPlugin#getFieldFilter() + * */ - public ImmutableOpenMap> findMappings(String[] concreteIndices, final String[] types) { + public ImmutableOpenMap> findMappings(String[] concreteIndices, + final String[] types, + Function> fieldFilter) + throws IOException { assert types != null; assert concreteIndices != null; if (concreteIndices.length == 0) { return ImmutableOpenMap.of(); } + boolean isAllTypes = isAllTypes(types); ImmutableOpenMap.Builder> indexMapBuilder = ImmutableOpenMap.builder(); Iterable intersection = HppcMaps.intersection(ObjectHashSet.from(concreteIndices), indices.keys()); for (String index : intersection) { IndexMetaData indexMetaData = indices.get(index); - ImmutableOpenMap.Builder filteredMappings; - if (isAllTypes(types)) { - indexMapBuilder.put(index, indexMetaData.getMappings()); // No types specified means get it all - + Predicate fieldPredicate = fieldFilter.apply(index); + if (isAllTypes) { + indexMapBuilder.put(index, filterFields(indexMetaData.getMappings(), fieldPredicate)); } else { - filteredMappings = ImmutableOpenMap.builder(); + ImmutableOpenMap.Builder filteredMappings = ImmutableOpenMap.builder(); for (ObjectObjectCursor cursor : indexMetaData.getMappings()) { if (Regex.simpleMatch(types, cursor.key)) { - filteredMappings.put(cursor.key, cursor.value); + filteredMappings.put(cursor.key, filterFields(cursor.value, fieldPredicate)); } } if (!filteredMappings.isEmpty()) { @@ -360,6 +370,95 @@ public class MetaData implements Iterable, Diffable, To return indexMapBuilder.build(); } + private static ImmutableOpenMap filterFields(ImmutableOpenMap mappings, + Predicate fieldPredicate) throws IOException { + if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + return mappings; + } + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(mappings.size()); 
+ for (ObjectObjectCursor cursor : mappings) { + builder.put(cursor.key, filterFields(cursor.value, fieldPredicate)); + } + return builder.build(); // No types specified means return them all + } + + @SuppressWarnings("unchecked") + private static MappingMetaData filterFields(MappingMetaData mappingMetaData, Predicate fieldPredicate) throws IOException { + if (fieldPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + return mappingMetaData; + } + Map sourceAsMap = XContentHelper.convertToMap(mappingMetaData.source().compressedReference(), true).v2(); + Map mapping; + if (sourceAsMap.size() == 1 && sourceAsMap.containsKey(mappingMetaData.type())) { + mapping = (Map) sourceAsMap.get(mappingMetaData.type()); + } else { + mapping = sourceAsMap; + } + + Map properties = (Map)mapping.get("properties"); + if (properties == null || properties.isEmpty()) { + return mappingMetaData; + } + + filterFields("", properties, fieldPredicate); + + return new MappingMetaData(mappingMetaData.type(), sourceAsMap); + } + + @SuppressWarnings("unchecked") + private static boolean filterFields(String currentPath, Map fields, Predicate fieldPredicate) { + assert fieldPredicate != MapperPlugin.NOOP_FIELD_PREDICATE; + Iterator> entryIterator = fields.entrySet().iterator(); + while (entryIterator.hasNext()) { + Map.Entry entry = entryIterator.next(); + String newPath = mergePaths(currentPath, entry.getKey()); + Object value = entry.getValue(); + boolean mayRemove = true; + boolean isMultiField = false; + if (value instanceof Map) { + Map map = (Map) value; + Map properties = (Map)map.get("properties"); + if (properties != null) { + mayRemove = filterFields(newPath, properties, fieldPredicate); + } else { + Map subFields = (Map)map.get("fields"); + if (subFields != null) { + isMultiField = true; + if (mayRemove = filterFields(newPath, subFields, fieldPredicate)) { + map.remove("fields"); + } + } + } + } else { + throw new IllegalStateException("cannot filter mappings, found unknown element of 
type [" + value.getClass() + "]"); + } + + //only remove a field if it has no sub-fields left and it has to be excluded + if (fieldPredicate.test(newPath) == false) { + if (mayRemove) { + entryIterator.remove(); + } else if (isMultiField) { + //multi fields that should be excluded but hold subfields that don't have to be excluded are converted to objects + Map map = (Map) value; + Map subFields = (Map)map.get("fields"); + assert subFields.size() > 0; + map.put("properties", subFields); + map.remove("fields"); + map.remove("type"); + } + } + } + //return true if the ancestor may be removed, as it has no sub-fields left + return fields.size() == 0; + } + + private static String mergePaths(String path, String field) { + if (path.length() == 0) { + return field; + } + return path + "." + field; + } + /** * Returns all the concrete indices. */ diff --git a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java index 2c0bc929294..59c38be50e8 100644 --- a/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java +++ b/core/src/main/java/org/elasticsearch/cluster/metadata/MetaDataUpdateSettingsService.java @@ -38,6 +38,7 @@ import org.elasticsearch.common.Priority; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -54,7 +55,6 @@ import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; -import java.util.function.Predicate; import static org.elasticsearch.action.support.ContextPreservingActionListener.wrapPreservingContext; @@ -164,13 +164,16 @@ public class 
MetaDataUpdateSettingsService extends AbstractComponent implements Settings.Builder settingsForOpenIndices = Settings.builder(); final Set skippedSettings = new HashSet<>(); - indexScopedSettings.validate(normalizedSettings, false); // don't validate dependencies here we check it below - // never allow to change the number of shards + indexScopedSettings.validate(normalizedSettings.filter(s -> Regex.isSimpleMatchPattern(s) == false /* don't validate wildcards */), + false); //don't validate dependencies here we check it below never allow to change the number of shards for (String key : normalizedSettings.keySet()) { Setting setting = indexScopedSettings.get(key); - assert setting != null; // we already validated the normalized settings + boolean isWildcard = setting == null && Regex.isSimpleMatchPattern(key); + assert setting != null // we already validated the normalized settings + || (isWildcard && normalizedSettings.hasValue(key) == false) + : "unknown setting: " + key + " isWildcard: " + isWildcard + " hasValue: " + normalizedSettings.hasValue(key); settingsForClosedIndices.copy(key, normalizedSettings); - if (setting.isDynamic()) { + if (isWildcard || setting.isDynamic()) { settingsForOpenIndices.copy(key, normalizedSettings); } else { skippedSettings.add(key); diff --git a/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java b/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java index 13c2e50eba2..9914ee2577a 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/ClusterApplierService.java @@ -133,8 +133,11 @@ public class ClusterApplierService extends AbstractLifecycleComponent implements Objects.requireNonNull(nodeConnectionsService, "please set the node connection service before starting"); Objects.requireNonNull(state.get(), "please set initial state before starting"); addListener(localNodeMasterListeners); 
- threadPoolExecutor = EsExecutors.newSinglePrioritizing(CLUSTER_UPDATE_THREAD_NAME, - daemonThreadFactory(settings, CLUSTER_UPDATE_THREAD_NAME), threadPool.getThreadContext(), threadPool.scheduler()); + threadPoolExecutor = EsExecutors.newSinglePrioritizing( + nodeName() + "/" + CLUSTER_UPDATE_THREAD_NAME, + daemonThreadFactory(settings, CLUSTER_UPDATE_THREAD_NAME), + threadPool.getThreadContext(), + threadPool.scheduler()); } class UpdateTask extends SourcePrioritizedRunnable implements Function { diff --git a/core/src/main/java/org/elasticsearch/cluster/service/MasterService.java b/core/src/main/java/org/elasticsearch/cluster/service/MasterService.java index a5f71dc48b8..6858866d2dc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/service/MasterService.java +++ b/core/src/main/java/org/elasticsearch/cluster/service/MasterService.java @@ -104,8 +104,11 @@ public class MasterService extends AbstractLifecycleComponent { protected synchronized void doStart() { Objects.requireNonNull(clusterStatePublisher, "please set a cluster state publisher before starting"); Objects.requireNonNull(clusterStateSupplier, "please set a cluster state supplier before starting"); - threadPoolExecutor = EsExecutors.newSinglePrioritizing(MASTER_UPDATE_THREAD_NAME, - daemonThreadFactory(settings, MASTER_UPDATE_THREAD_NAME), threadPool.getThreadContext(), threadPool.scheduler()); + threadPoolExecutor = EsExecutors.newSinglePrioritizing( + nodeName() + "/" + MASTER_UPDATE_THREAD_NAME, + daemonThreadFactory(settings, MASTER_UPDATE_THREAD_NAME), + threadPool.getThreadContext(), + threadPool.scheduler()); taskBatcher = new Batcher(logger, threadPoolExecutor); } diff --git a/core/src/main/java/org/elasticsearch/common/bytes/ByteBufferReference.java b/core/src/main/java/org/elasticsearch/common/bytes/ByteBufferReference.java new file mode 100644 index 00000000000..fbdcdfd6885 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/bytes/ByteBufferReference.java @@ -0,0 +1,89 @@ 
+/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.bytes; + +import org.apache.lucene.util.BytesRef; + +import java.nio.ByteBuffer; + +/** + * This is a {@link BytesReference} backed by a {@link ByteBuffer}. The byte buffer can either be a heap or + * direct byte buffer. The reference is composed of the space between the {@link ByteBuffer#position} and + * {@link ByteBuffer#limit} at construction time. If the position or limit of the underlying byte buffer is + * changed, those changes will not be reflected in this reference. However, modifying the limit or position + * of the underlying byte buffer is not recommended as those can be used during {@link ByteBuffer#get()} + * bounds checks. Use {@link ByteBuffer#duplicate()} at creation time if you plan on modifying the markers of + * the underlying byte buffer. Any changes to the underlying data in the byte buffer will be reflected. 
+ */ +public class ByteBufferReference extends BytesReference { + + private final ByteBuffer buffer; + private final int offset; + private final int length; + + public ByteBufferReference(ByteBuffer buffer) { + this.buffer = buffer; + this.offset = buffer.position(); + this.length = buffer.remaining(); + } + + @Override + public byte get(int index) { + return buffer.get(index + offset); + } + + @Override + public int length() { + return length; + } + + @Override + public BytesReference slice(int from, int length) { + if (from < 0 || (from + length) > this.length) { + throw new IndexOutOfBoundsException("can't slice a buffer with length [" + this.length + "], with slice parameters from [" + + from + "], length [" + length + "]"); + } + ByteBuffer newByteBuffer = buffer.duplicate(); + newByteBuffer.position(offset + from); + newByteBuffer.limit(offset + from + length); + return new ByteBufferReference(newByteBuffer); + } + + /** + * This will return a bytes ref composed of the bytes. If this is a direct byte buffer, the bytes will + * have to be copied. 
+ * + * @return the bytes ref + */ + @Override + public BytesRef toBytesRef() { + if (buffer.hasArray()) { + return new BytesRef(buffer.array(), buffer.arrayOffset() + offset, length); + } + final byte[] copy = new byte[length]; + buffer.get(copy, offset, length); + return new BytesRef(copy); + } + + @Override + public long ramBytesUsed() { + return buffer.capacity(); + } +} diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java b/core/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java index f8030296940..9eb1fa9a3f4 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoShapeType.java @@ -241,6 +241,11 @@ public enum GeoShapeType { } return coordinates; } + + @Override + public String wktName() { + return BBOX; + } }, CIRCLE("circle") { @Override @@ -273,11 +278,13 @@ public enum GeoShapeType { private final String shapename; private static Map shapeTypeMap = new HashMap<>(); + private static final String BBOX = "BBOX"; static { for (GeoShapeType type : values()) { shapeTypeMap.put(type.shapename, type); } + shapeTypeMap.put(ENVELOPE.wktName().toLowerCase(Locale.ROOT), ENVELOPE); } GeoShapeType(String shapename) { @@ -300,6 +307,11 @@ public enum GeoShapeType { ShapeBuilder.Orientation orientation, boolean coerce); abstract CoordinateNode validate(CoordinateNode coordinates, boolean coerce); + /** wkt shape name */ + public String wktName() { + return this.shapename; + } + public static List getShapeWriteables() { List namedWriteables = new ArrayList<>(); namedWriteables.add(new Entry(ShapeBuilder.class, PointBuilder.TYPE.shapeName(), PointBuilder::new)); @@ -313,4 +325,9 @@ public enum GeoShapeType { namedWriteables.add(new Entry(ShapeBuilder.class, GeometryCollectionBuilder.TYPE.shapeName(), GeometryCollectionBuilder::new)); return namedWriteables; } + + @Override + public String toString() { + return this.shapename; + } } diff --git 
a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java index 108e66d9150..ecc33b94ae4 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/CircleBuilder.java @@ -168,6 +168,11 @@ public class CircleBuilder extends ShapeBuilder { return TYPE; } + @Override + public String toWKT() { + throw new UnsupportedOperationException("The WKT spec does not support CIRCLE geometry"); + } + @Override public int hashCode() { return Objects.hash(center, radius, unit.ordinal()); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java index b352aa1d924..4949c363347 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/EnvelopeBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.common.geo.builders; import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.locationtech.spatial4j.shape.Rectangle; import com.vividsolutions.jts.geom.Coordinate; @@ -70,6 +71,28 @@ public class EnvelopeBuilder extends ShapeBuilder { return this.bottomRight; } + @Override + protected StringBuilder contentToWKT() { + StringBuilder sb = new StringBuilder(); + + sb.append(GeoWKTParser.LPAREN); + // minX, maxX, maxY, minY + sb.append(topLeft.x); + sb.append(GeoWKTParser.COMMA); + sb.append(GeoWKTParser.SPACE); + sb.append(bottomRight.x); + sb.append(GeoWKTParser.COMMA); + sb.append(GeoWKTParser.SPACE); + // TODO support Z?? 
+ sb.append(topLeft.y); + sb.append(GeoWKTParser.COMMA); + sb.append(GeoWKTParser.SPACE); + sb.append(bottomRight.y); + sb.append(GeoWKTParser.RPAREN); + + return sb; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index 3ea422265a7..84052939da4 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.geo.builders; import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.parsers.ShapeParser; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; import org.locationtech.spatial4j.shape.Shape; import org.elasticsearch.ElasticsearchException; @@ -136,6 +137,23 @@ public class GeometryCollectionBuilder extends ShapeBuilder { return builder.endObject(); } + @Override + protected StringBuilder contentToWKT() { + StringBuilder sb = new StringBuilder(); + if (shapes.isEmpty()) { + sb.append(GeoWKTParser.EMPTY); + } else { + sb.append(GeoWKTParser.LPAREN); + sb.append(shapes.get(0).toWKT()); + for (int i = 1; i < shapes.size(); ++i) { + sb.append(GeoWKTParser.COMMA); + sb.append(shapes.get(i).toWKT()); + } + sb.append(GeoWKTParser.RPAREN); + } + return sb; + } + @Override public GeoShapeType type() { return TYPE; diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java index 1a4f71da2d4..34a8960f69c 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java +++ 
b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiLineStringBuilder.java @@ -20,8 +20,8 @@ package org.elasticsearch.common.geo.builders; import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.LineString; @@ -82,6 +82,25 @@ public class MultiLineStringBuilder extends ShapeBuilder 0) { + sb.append(ShapeBuilder.coordinateListToWKT(lines.get(0).coordinates)); + } + for (int i = 1; i < lines.size(); ++i) { + sb.append(GeoWKTParser.COMMA); + sb.append(ShapeBuilder.coordinateListToWKT(lines.get(i).coordinates)); + } + sb.append(GeoWKTParser.RPAREN); + } + return sb; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 3c002631b8d..aa577887e00 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -21,6 +21,7 @@ package org.elasticsearch.common.geo.builders; import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.parsers.ShapeParser; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; import org.locationtech.spatial4j.shape.Shape; import com.vividsolutions.jts.geom.Coordinate; @@ -101,6 +102,37 @@ public class MultiPolygonBuilder extends ShapeBuilder { return polygons; } + private static String polygonCoordinatesToWKT(PolygonBuilder polygon) { + StringBuilder sb = new StringBuilder(); + sb.append(GeoWKTParser.LPAREN); + 
sb.append(ShapeBuilder.coordinateListToWKT(polygon.shell().coordinates)); + for (LineStringBuilder hole : polygon.holes()) { + sb.append(GeoWKTParser.COMMA); + sb.append(ShapeBuilder.coordinateListToWKT(hole.coordinates)); + } + sb.append(GeoWKTParser.RPAREN); + return sb.toString(); + } + + @Override + protected StringBuilder contentToWKT() { + final StringBuilder sb = new StringBuilder(); + if (polygons.isEmpty()) { + sb.append(GeoWKTParser.EMPTY); + } else { + sb.append(GeoWKTParser.LPAREN); + if (polygons.size() > 0) { + sb.append(polygonCoordinatesToWKT(polygons.get(0))); + } + for (int i = 1; i < polygons.size(); ++i) { + sb.append(GeoWKTParser.COMMA); + sb.append(polygonCoordinatesToWKT(polygons.get(i))); + } + sb.append(GeoWKTParser.RPAREN); + } + return sb; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java index 919aae37c73..ffcb44c9e46 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/PolygonBuilder.java @@ -729,6 +729,19 @@ public class PolygonBuilder extends ShapeBuilder { } } + @Override + protected StringBuilder contentToWKT() { + StringBuilder sb = new StringBuilder(); + sb.append('('); + sb.append(ShapeBuilder.coordinateListToWKT(shell.coordinates)); + for (LineStringBuilder hole : holes) { + sb.append(", "); + sb.append(ShapeBuilder.coordinateListToWKT(hole.coordinates)); + } + sb.append(')'); + return sb; + } + @Override public int hashCode() { return Objects.hash(shell, holes, orientation); diff --git a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java index ef50a667faa..106c312a3bc 100644 --- 
a/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java +++ b/core/src/main/java/org/elasticsearch/common/geo/builders/ShapeBuilder.java @@ -27,6 +27,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.Assertions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoShapeType; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -339,6 +340,47 @@ public abstract class ShapeBuilder> } } + protected StringBuilder contentToWKT() { + return coordinateListToWKT(this.coordinates); + } + + public String toWKT() { + StringBuilder sb = new StringBuilder(); + sb.append(type().wktName()); + sb.append(GeoWKTParser.SPACE); + sb.append(contentToWKT()); + return sb.toString(); + } + + protected static StringBuilder coordinateListToWKT(final List coordinates) { + final StringBuilder sb = new StringBuilder(); + + if (coordinates.isEmpty()) { + sb.append(GeoWKTParser.EMPTY); + } else { + // walk through coordinates: + sb.append(GeoWKTParser.LPAREN); + sb.append(coordinateToWKT(coordinates.get(0))); + for (int i = 1; i < coordinates.size(); ++i) { + sb.append(GeoWKTParser.COMMA); + sb.append(GeoWKTParser.SPACE); + sb.append(coordinateToWKT(coordinates.get(i))); + } + sb.append(GeoWKTParser.RPAREN); + } + + return sb; + } + + private static String coordinateToWKT(final Coordinate coordinate) { + final StringBuilder sb = new StringBuilder(); + sb.append(coordinate.x + GeoWKTParser.SPACE + coordinate.y); + if (Double.isNaN(coordinate.z) == false) { + sb.append(GeoWKTParser.SPACE + coordinate.z); + } + return sb.toString(); + } + protected static final IntersectionOrder INTERSECTION_ORDER = new IntersectionOrder(); private static final class IntersectionOrder implements Comparator { diff --git 
a/core/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java b/core/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java new file mode 100644 index 00000000000..005caed53a7 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/common/geo/parsers/GeoWKTParser.java @@ -0,0 +1,321 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.geo.parsers; + +import com.vividsolutions.jts.geom.Coordinate; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.geo.GeoShapeType; + +import org.elasticsearch.common.geo.builders.CoordinatesBuilder; +import org.elasticsearch.common.geo.builders.EnvelopeBuilder; +import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; +import org.elasticsearch.common.geo.builders.LineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiLineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiPointBuilder; +import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; +import org.elasticsearch.common.geo.builders.PointBuilder; +import org.elasticsearch.common.geo.builders.PolygonBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.io.FastStringReader; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.io.StreamTokenizer; +import java.util.List; + +/** + * Parses shape geometry represented in WKT format + * + * complies with OGC® document: 12-063r5 and ISO/IEC 13249-3:2016 standard + * located at http://docs.opengeospatial.org/is/12-063r5/12-063r5.html + */ +public class GeoWKTParser { + public static final String EMPTY = "EMPTY"; + public static final String SPACE = Loggers.SPACE; + public static final String LPAREN = "("; + public static final String RPAREN = ")"; + public static final String COMMA = ","; + private static final String NAN = "NaN"; + + private static final String NUMBER = ""; + private static final String EOF = "END-OF-STREAM"; + private static final String EOL = "END-OF-LINE"; + + // no instance + private GeoWKTParser() {} + + public static ShapeBuilder parse(XContentParser parser) + throws IOException, ElasticsearchParseException { + FastStringReader reader = new FastStringReader(parser.text()); + 
try { + // setup the tokenizer; configured to read words w/o numbers + StreamTokenizer tokenizer = new StreamTokenizer(reader); + tokenizer.resetSyntax(); + tokenizer.wordChars('a', 'z'); + tokenizer.wordChars('A', 'Z'); + tokenizer.wordChars(128 + 32, 255); + tokenizer.wordChars('0', '9'); + tokenizer.wordChars('-', '-'); + tokenizer.wordChars('+', '+'); + tokenizer.wordChars('.', '.'); + tokenizer.whitespaceChars(0, ' '); + tokenizer.commentChar('#'); + ShapeBuilder builder = parseGeometry(tokenizer); + checkEOF(tokenizer); + return builder; + } finally { + reader.close(); + } + } + + /** parse geometry from the stream tokenizer */ + private static ShapeBuilder parseGeometry(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + final GeoShapeType type = GeoShapeType.forName(nextWord(stream)); + switch (type) { + case POINT: + return parsePoint(stream); + case MULTIPOINT: + return parseMultiPoint(stream); + case LINESTRING: + return parseLine(stream); + case MULTILINESTRING: + return parseMultiLine(stream); + case POLYGON: + return parsePolygon(stream); + case MULTIPOLYGON: + return parseMultiPolygon(stream); + case ENVELOPE: + return parseBBox(stream); + case GEOMETRYCOLLECTION: + return parseGeometryCollection(stream); + default: + throw new IllegalArgumentException("Unknown geometry type: " + type); + } + } + + private static EnvelopeBuilder parseBBox(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + if (nextEmptyOrOpen(stream).equals(EMPTY)) { + return null; + } + double minLon = nextNumber(stream); + nextComma(stream); + double maxLon = nextNumber(stream); + nextComma(stream); + double maxLat = nextNumber(stream); + nextComma(stream); + double minLat = nextNumber(stream); + nextCloser(stream); + return new EnvelopeBuilder(new Coordinate(minLon, maxLat), new Coordinate(maxLon, minLat)); + } + + private static PointBuilder parsePoint(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + if 
(nextEmptyOrOpen(stream).equals(EMPTY)) { + return null; + } + PointBuilder pt = new PointBuilder(nextNumber(stream), nextNumber(stream)); + if (isNumberNext(stream) == true) { + nextNumber(stream); + } + nextCloser(stream); + return pt; + } + + private static List parseCoordinateList(StreamTokenizer stream) + throws IOException, ElasticsearchParseException { + CoordinatesBuilder coordinates = new CoordinatesBuilder(); + boolean isOpenParen = false; + if (isNumberNext(stream) || (isOpenParen = nextWord(stream).equals(LPAREN))) { + coordinates.coordinate(parseCoordinate(stream)); + } + + if (isOpenParen && nextCloser(stream).equals(RPAREN) == false) { + throw new ElasticsearchParseException("expected: [{}]" + RPAREN + " but found: [{}]" + tokenString(stream), stream.lineno()); + } + + while (nextCloserOrComma(stream).equals(COMMA)) { + isOpenParen = false; + if (isNumberNext(stream) || (isOpenParen = nextWord(stream).equals(LPAREN))) { + coordinates.coordinate(parseCoordinate(stream)); + } + if (isOpenParen && nextCloser(stream).equals(RPAREN) == false) { + throw new ElasticsearchParseException("expected: " + RPAREN + " but found: " + tokenString(stream), stream.lineno()); + } + } + return coordinates.build(); + } + + private static Coordinate parseCoordinate(StreamTokenizer stream) + throws IOException, ElasticsearchParseException { + final double lon = nextNumber(stream); + final double lat = nextNumber(stream); + Double z = null; + if (isNumberNext(stream)) { + z = nextNumber(stream); + } + return z == null ? 
new Coordinate(lon, lat) : new Coordinate(lon, lat, z); + } + + private static MultiPointBuilder parseMultiPoint(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + String token = nextEmptyOrOpen(stream); + if (token.equals(EMPTY)) { + return null; + } + return new MultiPointBuilder(parseCoordinateList(stream)); + } + + private static LineStringBuilder parseLine(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + String token = nextEmptyOrOpen(stream); + if (token.equals(EMPTY)) { + return null; + } + return new LineStringBuilder(parseCoordinateList(stream)); + } + + private static MultiLineStringBuilder parseMultiLine(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + String token = nextEmptyOrOpen(stream); + if (token.equals(EMPTY)) { + return null; + } + MultiLineStringBuilder builder = new MultiLineStringBuilder(); + builder.linestring(parseLine(stream)); + while (nextCloserOrComma(stream).equals(COMMA)) { + builder.linestring(parseLine(stream)); + } + return builder; + } + + private static PolygonBuilder parsePolygon(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + if (nextEmptyOrOpen(stream).equals(EMPTY)) { + return null; + } + PolygonBuilder builder = new PolygonBuilder(parseLine(stream), ShapeBuilder.Orientation.RIGHT); + while (nextCloserOrComma(stream).equals(COMMA)) { + builder.hole(parseLine(stream)); + } + return builder; + } + + private static MultiPolygonBuilder parseMultiPolygon(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + if (nextEmptyOrOpen(stream).equals(EMPTY)) { + return null; + } + MultiPolygonBuilder builder = new MultiPolygonBuilder().polygon(parsePolygon(stream)); + while (nextCloserOrComma(stream).equals(COMMA)) { + builder.polygon(parsePolygon(stream)); + } + return builder; + } + + private static GeometryCollectionBuilder parseGeometryCollection(StreamTokenizer stream) + throws IOException, 
ElasticsearchParseException { + if (nextEmptyOrOpen(stream).equals(EMPTY)) { + return null; + } + GeometryCollectionBuilder builder = new GeometryCollectionBuilder().shape(parseGeometry(stream)); + while (nextCloserOrComma(stream).equals(COMMA)) { + builder.shape(parseGeometry(stream)); + } + return builder; + } + + /** next word in the stream */ + private static String nextWord(StreamTokenizer stream) throws ElasticsearchParseException, IOException { + switch (stream.nextToken()) { + case StreamTokenizer.TT_WORD: + final String word = stream.sval; + return word.equalsIgnoreCase(EMPTY) ? EMPTY : word; + case '(': return LPAREN; + case ')': return RPAREN; + case ',': return COMMA; + } + throw new ElasticsearchParseException("expected word but found: " + tokenString(stream), stream.lineno()); + } + + private static double nextNumber(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + if (stream.nextToken() == StreamTokenizer.TT_WORD) { + if (stream.sval.equalsIgnoreCase(NAN)) { + return Double.NaN; + } else { + try { + return Double.parseDouble(stream.sval); + } catch (NumberFormatException e) { + throw new ElasticsearchParseException("invalid number found: " + stream.sval, stream.lineno()); + } + } + } + throw new ElasticsearchParseException("expected number but found: " + tokenString(stream), stream.lineno()); + } + + private static String tokenString(StreamTokenizer stream) { + switch (stream.ttype) { + case StreamTokenizer.TT_WORD: return stream.sval; + case StreamTokenizer.TT_EOF: return EOF; + case StreamTokenizer.TT_EOL: return EOL; + case StreamTokenizer.TT_NUMBER: return NUMBER; + } + return "'" + (char) stream.ttype + "'"; + } + + private static boolean isNumberNext(StreamTokenizer stream) throws IOException { + final int type = stream.nextToken(); + stream.pushBack(); + return type == StreamTokenizer.TT_WORD; + } + + private static String nextEmptyOrOpen(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + 
final String next = nextWord(stream); + if (next.equals(EMPTY) || next.equals(LPAREN)) { + return next; + } + throw new ElasticsearchParseException("expected " + EMPTY + " or " + LPAREN + + " but found: " + tokenString(stream), stream.lineno()); + } + + private static String nextCloser(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + if (nextWord(stream).equals(RPAREN)) { + return RPAREN; + } + throw new ElasticsearchParseException("expected " + RPAREN + " but found: " + tokenString(stream), stream.lineno()); + } + + private static String nextComma(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + if (nextWord(stream).equals(COMMA) == true) { + return COMMA; + } + throw new ElasticsearchParseException("expected " + COMMA + " but found: " + tokenString(stream), stream.lineno()); + } + + private static String nextCloserOrComma(StreamTokenizer stream) throws IOException, ElasticsearchParseException { + String token = nextWord(stream); + if (token.equals(COMMA) || token.equals(RPAREN)) { + return token; + } + throw new ElasticsearchParseException("expected " + COMMA + " or " + RPAREN + + " but found: " + tokenString(stream), stream.lineno()); + } + + /** next word in the stream */ + private static void checkEOF(StreamTokenizer stream) throws ElasticsearchParseException, IOException { + if (stream.nextToken() != StreamTokenizer.TT_EOF) { + throw new ElasticsearchParseException("expected end of WKT string but found additional text: " + + tokenString(stream), stream.lineno()); + } + } +} diff --git a/core/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java b/core/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java index 39540f902fe..0ee3333c480 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java +++ b/core/src/main/java/org/elasticsearch/common/geo/parsers/ShapeParser.java @@ -51,6 +51,8 @@ public interface ShapeParser { return null; } if 
(parser.currentToken() == XContentParser.Token.START_OBJECT) { return GeoJsonParser.parse(parser, shapeMapper); + } else if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return GeoWKTParser.parse(parser); } throw new ElasticsearchParseException("shape must be an object consisting of type and coordinates"); } diff --git a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index 38eaef1d14d..f952eb36a0d 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -500,6 +500,16 @@ public abstract class AbstractScopedSettings extends AbstractComponent { return updateSettings(toApply, target, updates, type, false); } + /** + * Returns true if the given key is a valid delete key + */ + private boolean isValidDelete(String key, boolean onlyDynamic) { + return isFinalSetting(key) == false && // it's not a final setting + (onlyDynamic && isDynamicSetting(key) // it's a dynamicSetting and we only do dynamic settings + || get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX) // the setting is not registered AND it's been archived + || (onlyDynamic == false && get(key) != null)); // if it's not dynamic AND we have a key + } + /** * Updates a target settings builder with new, updated or deleted settings from a given settings builder. 
* @@ -519,21 +529,16 @@ public abstract class AbstractScopedSettings extends AbstractComponent { final Predicate canUpdate = (key) -> ( isFinalSetting(key) == false && // it's not a final setting ((onlyDynamic == false && get(key) != null) || isDynamicSetting(key))); - final Predicate canRemove = (key) ->(// we can delete if - isFinalSetting(key) == false && // it's not a final setting - (onlyDynamic && isDynamicSetting(key) // it's a dynamicSetting and we only do dynamic settings - || get(key) == null && key.startsWith(ARCHIVED_SETTINGS_PREFIX) // the setting is not registered AND it's been archived - || (onlyDynamic == false && get(key) != null))); // if it's not dynamic AND we have a key for (String key : toApply.keySet()) { - boolean isNull = toApply.get(key) == null; - if (isNull && (canRemove.test(key) || key.endsWith("*"))) { + boolean isDelete = toApply.hasValue(key) == false; + if (isDelete && (isValidDelete(key, onlyDynamic) || key.endsWith("*"))) { // this either accepts null values that suffice the canUpdate test OR wildcard expressions (key ends with *) // we don't validate if there is any dynamic setting with that prefix yet we could do in the future toRemove.add(key); // we don't set changed here it's set after we apply deletes below if something actually changed } else if (get(key) == null) { throw new IllegalArgumentException(type + " setting [" + key + "], not recognized"); - } else if (isNull == false && canUpdate.test(key)) { + } else if (isDelete == false && canUpdate.test(key)) { validate(key, toApply, false); // we might not have a full picture here do to a dependency validation settingsBuilder.copy(key, toApply); updates.copy(key, toApply); @@ -546,7 +551,7 @@ public abstract class AbstractScopedSettings extends AbstractComponent { } } } - changed |= applyDeletes(toRemove, target, canRemove); + changed |= applyDeletes(toRemove, target, k -> isValidDelete(k, onlyDynamic)); target.put(settingsBuilder.build()); return changed; } diff --git 
a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 2bea2a59e16..ae28b42cf16 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -85,6 +85,7 @@ import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchService; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.fetch.subphase.highlight.FastVectorHighlighter; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; @@ -360,6 +361,7 @@ public final class ClusterSettings extends AbstractScopedSettings { SearchService.DEFAULT_KEEPALIVE_SETTING, SearchService.KEEPALIVE_INTERVAL_SETTING, SearchService.MAX_KEEPALIVE_SETTING, + MultiBucketConsumerService.MAX_BUCKET_SETTING, SearchService.LOW_LEVEL_CANCELLATION_SETTING, Node.WRITE_PORTS_FILE_SETTING, Node.NODE_NAME_SETTING, diff --git a/core/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java b/core/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java index 441bb131f03..6ebc47c8252 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java +++ b/core/src/main/java/org/elasticsearch/common/settings/KeyStoreWrapper.java @@ -49,6 +49,7 @@ import java.util.HashSet; import java.util.Locale; import java.util.Map; import java.util.Set; +import java.util.regex.Pattern; import java.util.stream.Collectors; import org.apache.lucene.codecs.CodecUtil; @@ -59,7 +60,6 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.SimpleFSDirectory; import org.apache.lucene.util.SetOnce; -import 
org.elasticsearch.bootstrap.BootstrapSettings; import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.UserException; import org.elasticsearch.common.Randomness; @@ -75,6 +75,11 @@ import org.elasticsearch.common.Randomness; */ public class KeyStoreWrapper implements SecureSettings { + /** + * A regex for the valid characters that a setting name in the keystore may use. + */ + private static final Pattern ALLOWED_SETTING_NAME = Pattern.compile("[a-z0-9_\\-.]+"); + public static final Setting SEED_SETTING = SecureSetting.secureString("keystore.seed", null); /** Characters that may be used in the bootstrap seed setting added to all keystores. */ @@ -383,6 +388,18 @@ public class KeyStoreWrapper implements SecureSettings { return Base64.getDecoder().wrap(bytesStream); } + /** + * Ensure the given setting name is allowed. + * + * @throws IllegalArgumentException if the setting name is not valid + */ + public static void validateSettingName(String setting) { + if (ALLOWED_SETTING_NAME.matcher(setting).matches() == false) { + throw new IllegalArgumentException("Setting name [" + setting + "] does not match the allowed setting name pattern [" + + ALLOWED_SETTING_NAME.pattern() + "]"); + } + } + /** * Set a string setting. * @@ -390,6 +407,7 @@ public class KeyStoreWrapper implements SecureSettings { */ void setString(String setting, char[] value) throws GeneralSecurityException { assert isLoaded(); + validateSettingName(setting); if (ASCII_ENCODER.canEncode(CharBuffer.wrap(value)) == false) { throw new IllegalArgumentException("Value must be ascii"); } @@ -401,6 +419,7 @@ public class KeyStoreWrapper implements SecureSettings { /** Set a file setting. 
*/ void setFile(String setting, byte[] bytes) throws GeneralSecurityException { assert isLoaded(); + validateSettingName(setting); bytes = Base64.getEncoder().encode(bytes); char[] chars = new char[bytes.length]; for (int i = 0; i < chars.length; ++i) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java b/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java index 4a1e598bba8..c23a0bd42e3 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java +++ b/core/src/main/java/org/elasticsearch/common/settings/SecureSetting.java @@ -46,6 +46,7 @@ public abstract class SecureSetting extends Setting { private SecureSetting(String key, Property... properties) { super(key, (String)null, null, ArrayUtils.concat(properties, FIXED_PROPERTIES, Property.class)); assert assertAllowedProperties(properties); + KeyStoreWrapper.validateSettingName(key); } private boolean assertAllowedProperties(Setting.Property... properties) { diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java index 41acefdd8e8..0a0a01c3fe3 100644 --- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java +++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java @@ -306,6 +306,13 @@ public final class Settings implements ToXContentFragment { } } + /** + * Returns true iff the given key has a value in this settings object + */ + public boolean hasValue(String key) { + return settings.get(key) != null; + } + /** * We have to lazy initialize the deprecation logger as otherwise a static logger here would be constructed before logging is configured * leading to a runtime failure (see {@link LogConfigurator#checkErrorListener()} ). 
The premature construction would come from any @@ -617,7 +624,7 @@ public final class Settings implements ToXContentFragment { } /** - * Parsers the generated xconten from {@link Settings#toXContent(XContentBuilder, Params)} into a new Settings object. + * Parsers the generated xcontent from {@link Settings#toXContent(XContentBuilder, Params)} into a new Settings object. * Note this method requires the parser to either be positioned on a null token or on * {@link org.elasticsearch.common.xcontent.XContentParser.Token#START_OBJECT}. */ @@ -1229,8 +1236,9 @@ public final class Settings implements ToXContentFragment { Iterator> iterator = map.entrySet().iterator(); while(iterator.hasNext()) { Map.Entry entry = iterator.next(); - if (entry.getKey().startsWith(prefix) == false) { - replacements.put(prefix + entry.getKey(), entry.getValue()); + String key = entry.getKey(); + if (key.startsWith(prefix) == false && key.endsWith("*") == false) { + replacements.put(prefix + key, entry.getValue()); iterator.remove(); } } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java index 45d9a208284..057a970470b 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsExecutors.java @@ -246,13 +246,16 @@ public class EsExecutors { * waiting if necessary for space to become available. 
*/ static class ForceQueuePolicy implements XRejectedExecutionHandler { + @Override public void rejectedExecution(Runnable r, ThreadPoolExecutor executor) { try { + // force queue policy should only be used with a scaling queue + assert executor.getQueue() instanceof ExecutorScalingQueue; executor.getQueue().put(r); - } catch (InterruptedException e) { - //should never happen since we never wait - throw new EsRejectedExecutionException(e); + } catch (final InterruptedException e) { + // a scaling queue never blocks so a put to it can never be interrupted + throw new AssertionError(e); } } @@ -260,6 +263,7 @@ public class EsExecutors { public long rejected() { return 0; } + } } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java index 01fbbac725b..a38bbf452b7 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsRejectedExecutionException.java @@ -27,29 +27,20 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; public class EsRejectedExecutionException extends ElasticsearchException { + private final boolean isExecutorShutdown; - public EsRejectedExecutionException(String message, boolean isExecutorShutdown, Object... args) { - super(message, args); + public EsRejectedExecutionException(String message, boolean isExecutorShutdown) { + super(message); this.isExecutorShutdown = isExecutorShutdown; } - public EsRejectedExecutionException(String message, Object... 
args) { - this(message, false, args); - } - - public EsRejectedExecutionException(String message, boolean isExecutorShutdown) { - this(message, isExecutorShutdown, new Object[0]); + public EsRejectedExecutionException(String message) { + this(message, false); } public EsRejectedExecutionException() { - super((String)null); - this.isExecutorShutdown = false; - } - - public EsRejectedExecutionException(Throwable e) { - super(null, e); - this.isExecutorShutdown = false; + this(null, false); } @Override @@ -79,4 +70,5 @@ public class EsRejectedExecutionException extends ElasticsearchException { public boolean isExecutorShutdown() { return isExecutorShutdown; } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java index a1ac182b8dc..8bbf0a59ee0 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutor.java @@ -37,7 +37,11 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { /** * Name used in error reporting. 
*/ - protected final String name; + private final String name; + + final String getName() { + return name; + } EsThreadPoolExecutor(String name, int corePoolSize, int maximumPoolSize, long keepAliveTime, TimeUnit unit, BlockingQueue workQueue, ThreadFactory threadFactory, ThreadContext contextHolder) { @@ -138,15 +142,16 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { } @Override - public String toString() { + public final String toString() { StringBuilder b = new StringBuilder(); b.append(getClass().getSimpleName()).append('['); - b.append(name).append(", "); + b.append("name = ").append(name).append(", "); if (getQueue() instanceof SizeBlockingQueue) { @SuppressWarnings("rawtypes") SizeBlockingQueue queue = (SizeBlockingQueue) getQueue(); b.append("queue capacity = ").append(queue.capacity()).append(", "); } + appendThreadPoolExecutorDetails(b); /* * ThreadPoolExecutor has some nice information in its toString but we * can't get at it easily without just getting the toString. @@ -155,6 +160,16 @@ public class EsThreadPoolExecutor extends ThreadPoolExecutor { return b.toString(); } + /** + * Append details about this thread pool to the specified {@link StringBuilder}. 
All details should be appended as key/value pairs in + * the form "%s = %s, " + * + * @param sb the {@link StringBuilder} to append to + */ + protected void appendThreadPoolExecutorDetails(final StringBuilder sb) { + + } + protected Runnable wrapRunnable(Runnable command) { return contextHolder.preserveContext(command); } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java index 8062d5510c7..e929192b5dd 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/QueueResizingEsThreadPoolExecutor.java @@ -22,21 +22,16 @@ package org.elasticsearch.common.util.concurrent; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.common.ExponentiallyWeightedMovingAverage; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.util.concurrent.ResizableBlockingQueue; import java.util.Locale; -import java.util.concurrent.BlockingQueue; import java.util.concurrent.ThreadFactory; -import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.function.Supplier; -import java.util.stream.Stream; /** * An extension to thread pool executor, which automatically adjusts the queue size of the @@ -80,8 +75,8 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto this.maxQueueSize = maxQueueSize; this.targetedResponseTimeNanos = targetedResponseTime.getNanos(); this.executionEWMA = new 
ExponentiallyWeightedMovingAverage(EWMA_ALPHA, 0); - logger.debug("thread pool [{}] will adjust queue by [{}] when determining automatic queue size", - name, QUEUE_ADJUSTMENT_AMOUNT); + logger.debug( + "thread pool [{}] will adjust queue by [{}] when determining automatic queue size", getName(), QUEUE_ADJUSTMENT_AMOUNT); } @Override @@ -180,7 +175,7 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto final long avgTaskTime = totalNanos / tasksPerFrame; logger.debug("[{}]: there were [{}] tasks in [{}], avg task time [{}], EWMA task execution [{}], " + "[{} tasks/s], optimal queue is [{}], current capacity [{}]", - name, + getName(), tasksPerFrame, TimeValue.timeValueNanos(totalRuntime), TimeValue.timeValueNanos(avgTaskTime), @@ -196,7 +191,7 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto final int newCapacity = workQueue.adjustCapacity(desiredQueueSize, QUEUE_ADJUSTMENT_AMOUNT, minQueueSize, maxQueueSize); if (oldCapacity != newCapacity && logger.isDebugEnabled()) { - logger.debug("adjusted [{}] queue size by [{}], old capacity: [{}], new capacity: [{}]", name, + logger.debug("adjusted [{}] queue size by [{}], old capacity: [{}], new capacity: [{}]", getName(), newCapacity > oldCapacity ? QUEUE_ADJUSTMENT_AMOUNT : -QUEUE_ADJUSTMENT_AMOUNT, oldCapacity, newCapacity); } @@ -205,7 +200,7 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto logger.warn((Supplier) () -> new ParameterizedMessage( "failed to calculate optimal queue size for [{}] thread pool, " + "total frame time [{}ns], tasks [{}], task execution time [{}ns]", - name, totalRuntime, tasksPerFrame, totalNanos), + getName(), totalRuntime, tasksPerFrame, totalNanos), e); } finally { // Finally, decrement the task count and time back to their starting values. 
We @@ -224,7 +219,8 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto // - Adjustment happens and we decrement the tasks by 10, taskCount is now 15 // - Since taskCount will now be incremented forever, it will never be 10 again, // so there will be no further adjustments - logger.debug("[{}]: too many incoming tasks while queue size adjustment occurs, resetting measurements to 0", name); + logger.debug( + "[{}]: too many incoming tasks while queue size adjustment occurs, resetting measurements to 0", getName()); totalTaskNanos.getAndSet(1); taskCount.getAndSet(0); startNs = System.nanoTime(); @@ -237,26 +233,13 @@ public final class QueueResizingEsThreadPoolExecutor extends EsThreadPoolExecuto } @Override - public String toString() { - StringBuilder b = new StringBuilder(); - b.append(getClass().getSimpleName()).append('['); - b.append(name).append(", "); - - @SuppressWarnings("rawtypes") - ResizableBlockingQueue queue = (ResizableBlockingQueue) getQueue(); - - b.append("queue capacity = ").append(getCurrentCapacity()).append(", "); - b.append("min queue capacity = ").append(minQueueSize).append(", "); - b.append("max queue capacity = ").append(maxQueueSize).append(", "); - b.append("frame size = ").append(tasksPerFrame).append(", "); - b.append("targeted response rate = ").append(TimeValue.timeValueNanos(targetedResponseTimeNanos)).append(", "); - b.append("task execution EWMA = ").append(TimeValue.timeValueNanos((long)executionEWMA.getAverage())).append(", "); - b.append("adjustment amount = ").append(QUEUE_ADJUSTMENT_AMOUNT).append(", "); - /* - * ThreadPoolExecutor has some nice information in its toString but we - * can't get at it easily without just getting the toString. 
- */ - b.append(super.toString()).append(']'); - return b.toString(); + protected void appendThreadPoolExecutorDetails(StringBuilder sb) { + sb.append("min queue capacity = ").append(minQueueSize).append(", "); + sb.append("max queue capacity = ").append(maxQueueSize).append(", "); + sb.append("frame size = ").append(tasksPerFrame).append(", "); + sb.append("targeted response rate = ").append(TimeValue.timeValueNanos(targetedResponseTimeNanos)).append(", "); + sb.append("task execution EWMA = ").append(TimeValue.timeValueNanos((long) executionEWMA.getAverage())).append(", "); + sb.append("adjustment amount = ").append(QUEUE_ADJUSTMENT_AMOUNT).append(", "); } + } diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index 95c08e88898..6427368c4b9 100644 --- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -74,7 +74,6 @@ public final class ThreadContext implements Closeable, Writeable { private static final ThreadContextStruct DEFAULT_CONTEXT = new ThreadContextStruct(); private final Map defaultHeader; private final ContextThreadLocal threadLocal; - private boolean isSystemContext; /** * Creates a new ThreadContext instance @@ -121,7 +120,6 @@ public final class ThreadContext implements Closeable, Writeable { return () -> threadLocal.set(context); } - /** * Just like {@link #stashContext()} but no default context is set. * @param preserveResponseHeaders if set to true the response headers of the restore thread will be preserved. 
diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index 06269706e0d..54cdb7caeaa 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -167,12 +167,13 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[unicast_connect]"); unicastZenPingExecutorService = EsExecutors.newScaling( - "unicast_connect", - 0, concurrentConnects, - 60, - TimeUnit.SECONDS, - threadFactory, - threadPool.getThreadContext()); + nodeName() + "/" + "unicast_connect", + 0, + concurrentConnects, + 60, + TimeUnit.SECONDS, + threadFactory, + threadPool.getThreadContext()); } /** diff --git a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java index f923abc1a6c..8c134b140bd 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java +++ b/core/src/main/java/org/elasticsearch/index/engine/EngineConfig.java @@ -69,7 +69,9 @@ public final class EngineConfig { private final QueryCache queryCache; private final QueryCachingPolicy queryCachingPolicy; @Nullable - private final List refreshListeners; + private final List externalRefreshListener; + @Nullable + private final List internalRefreshListener; @Nullable private final Sort indexSort; private final boolean forceNewHistoryUUID; @@ -120,7 +122,8 @@ public final class EngineConfig { Similarity similarity, CodecService codecService, Engine.EventListener eventListener, QueryCache queryCache, QueryCachingPolicy queryCachingPolicy, boolean forceNewHistoryUUID, TranslogConfig translogConfig, TimeValue flushMergesAfter, - List refreshListeners, Sort indexSort, + List externalRefreshListener, + List internalRefreshListener, Sort 
indexSort, TranslogRecoveryRunner translogRecoveryRunner, CircuitBreakerService circuitBreakerService) { if (openMode == null) { throw new IllegalArgumentException("openMode must not be null"); @@ -147,7 +150,8 @@ public final class EngineConfig { this.flushMergesAfter = flushMergesAfter; this.openMode = openMode; this.forceNewHistoryUUID = forceNewHistoryUUID; - this.refreshListeners = refreshListeners; + this.externalRefreshListener = externalRefreshListener; + this.internalRefreshListener = internalRefreshListener; this.indexSort = indexSort; this.translogRecoveryRunner = translogRecoveryRunner; this.circuitBreakerService = circuitBreakerService; @@ -343,12 +347,18 @@ public final class EngineConfig { } /** - * The refresh listeners to add to Lucene + * The refresh listeners to add to Lucene for externally visible refreshes */ - public List getRefreshListeners() { - return refreshListeners; + public List getExternalRefreshListener() { + return externalRefreshListener; } + /** + * The refresh listeners to add to Lucene for internally visible refreshes. 
These listeners will also be invoked on external refreshes + */ + public List getInternalRefreshListener() { return internalRefreshListener;} + + /** * returns true if the engine is allowed to optimize indexing operations with an auto-generated ID */ diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index e431bfb7a5b..53747b063df 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -232,9 +232,12 @@ public class InternalEngine extends Engine { assert pendingTranslogRecovery.get() == false : "translog recovery can't be pending before we set it"; // don't allow commits until we are done with recovering pendingTranslogRecovery.set(openMode == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG); - for (ReferenceManager.RefreshListener listener: engineConfig.getRefreshListeners()) { + for (ReferenceManager.RefreshListener listener: engineConfig.getExternalRefreshListener()) { this.externalSearcherManager.addListener(listener); } + for (ReferenceManager.RefreshListener listener: engineConfig.getInternalRefreshListener()) { + this.internalSearcherManager.addListener(listener); + } success = true; } finally { if (success == false) { @@ -426,11 +429,6 @@ public class InternalEngine extends Engine { } else if (translog.isCurrent(translogGeneration) == false) { commitIndexWriter(indexWriter, translog, lastCommittedSegmentInfos.getUserData().get(Engine.SYNC_COMMIT_ID)); refreshLastCommittedSegmentInfos(); - } else if (lastCommittedSegmentInfos.getUserData().containsKey(HISTORY_UUID_KEY) == false) { - assert historyUUID != null; - // put the history uuid into the index - commitIndexWriter(indexWriter, translog, lastCommittedSegmentInfos.getUserData().get(Engine.SYNC_COMMIT_ID)); - refreshLastCommittedSegmentInfos(); } // clean up what's not needed 
translog.trimUnreferencedReaders(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index db40fb228bc..68d6ac66678 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -369,7 +369,7 @@ public class GeoShapeFieldMapper extends FieldMapper { public void setStrategyName(String strategyName) { checkIfFrozen(); this.strategyName = strategyName; - if (this.strategyName.equals(SpatialStrategy.TERM)) { + if (this.strategyName.equals(SpatialStrategy.TERM.getStrategyName())) { this.pointsOnly = true; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index b21f47d8feb..e34a762f527 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -105,6 +105,8 @@ public class MapperService extends AbstractIndexComponent implements Closeable { Setting.boolSetting("index.mapper.dynamic", INDEX_MAPPER_DYNAMIC_DEFAULT, Property.Dynamic, Property.IndexScope, Property.Deprecated); + //TODO this needs to be cleaned up: _timestamp and _ttl are not supported anymore, _field_names, _seq_no, _version and _source are + //also missing, not sure if on purpose. 
See IndicesModule#getMetadataMappers private static ObjectHashSet META_FIELDS = ObjectHashSet.from( "_uid", "_id", "_type", "_parent", "_routing", "_index", "_size", "_timestamp", "_ttl" diff --git a/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java b/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java index 2c60ebfac6b..95e3505e746 100644 --- a/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java +++ b/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java @@ -39,10 +39,13 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardClosedException; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.io.IOException; + /** * Background global checkpoint sync action initiated when a shard goes inactive. 
This is needed because while we send the global checkpoint * on every replication operation, after the last operation completes the global checkpoint could advance but without a follow-up operation @@ -116,16 +119,24 @@ public class GlobalCheckpointSyncAction extends TransportReplicationAction< @Override protected PrimaryResult shardOperationOnPrimary( final Request request, final IndexShard indexShard) throws Exception { - indexShard.getTranslog().sync(); + maybeSyncTranslog(indexShard); return new PrimaryResult<>(request, new ReplicationResponse()); } @Override protected ReplicaResult shardOperationOnReplica(final Request request, final IndexShard indexShard) throws Exception { - indexShard.getTranslog().sync(); + maybeSyncTranslog(indexShard); return new ReplicaResult(); } + private void maybeSyncTranslog(final IndexShard indexShard) throws IOException { + final Translog translog = indexShard.getTranslog(); + if (indexShard.getTranslogDurability() == Translog.Durability.REQUEST && + translog.getLastSyncedGlobalCheckpoint() < indexShard.getGlobalCheckpoint()) { + indexShard.getTranslog().sync(); + } + } + public static final class Request extends ReplicationRequest { private Request() { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 1dc28915d09..f0246060acf 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -48,7 +48,6 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.upgrade.post.UpgradeRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource; 
import org.elasticsearch.cluster.routing.RecoverySource.SnapshotRecoverySource; @@ -66,7 +65,6 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AsyncIOProcessor; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexModule; @@ -416,12 +414,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl logger.debug("failed to refresh due to move to cluster wide started", e); } - if (newRouting.primary()) { - final DiscoveryNode recoverySourceNode = recoveryState.getSourceNode(); - if (currentRouting.isRelocationTarget() == false || recoverySourceNode.getVersion().before(Version.V_6_0_0_alpha1)) { - // there was no primary context hand-off in < 6.0.0, need to manually activate the shard - getEngine().seqNoService().activatePrimaryMode(getEngine().seqNoService().getLocalCheckpoint()); - } + if (newRouting.primary() && currentRouting.isRelocationTarget() == false) { + // there was no primary context hand-off in < 6.0.0, need to manually activate the shard + getEngine().seqNoService().activatePrimaryMode(getEngine().seqNoService().getLocalCheckpoint()); } changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); @@ -485,15 +480,18 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl * subsequently fails before the primary/replica re-sync completes successfully and we are now being * promoted, the local checkpoint tracker here could be left in a state where it would re-issue sequence * numbers. To ensure that this is not the case, we restore the state of the local checkpoint tracker by - * replaying the translog and marking any operations there are completed. 
Rolling the translog generation is - * not strictly needed here (as we will never have collisions between sequence numbers in a translog - * generation in a new primary as it takes the last known sequence number as a starting point), but it - * simplifies reasoning about the relationship between primary terms and translog generations. + * replaying the translog and marking any operations there are completed. */ - getEngine().rollTranslogGeneration(); - getEngine().restoreLocalCheckpointFromTranslog(); - getEngine().fillSeqNoGaps(newPrimaryTerm); - getEngine().seqNoService().updateLocalCheckpointForShard(currentRouting.allocationId().getId(), + final Engine engine = getEngine(); + engine.restoreLocalCheckpointFromTranslog(); + /* Rolling the translog generation is not strictly needed here (as we will never have collisions between + * sequence numbers in a translog generation in a new primary as it takes the last known sequence number + * as a starting point), but it simplifies reasoning about the relationship between primary terms and + * translog generations. 
+ */ + engine.rollTranslogGeneration(); + engine.fillSeqNoGaps(newPrimaryTerm); + engine.seqNoService().updateLocalCheckpointForShard(currentRouting.allocationId().getId(), getEngine().seqNoService().getLocalCheckpoint()); primaryReplicaSyncer.accept(this, new ActionListener() { @Override @@ -1337,6 +1335,17 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl active.set(true); newEngine.recoverFromTranslog(); } + assertSequenceNumbersInCommit(); + } + + private boolean assertSequenceNumbersInCommit() throws IOException { + final Map userData = SegmentInfos.readLatestCommit(store.directory()).getUserData(); + assert userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) : "commit point doesn't contains a local checkpoint"; + assert userData.containsKey(SequenceNumbers.MAX_SEQ_NO) : "commit point doesn't contains a maximum sequence number"; + assert userData.containsKey(Engine.HISTORY_UUID_KEY) : "commit point doesn't contains a history uuid"; + assert userData.get(Engine.HISTORY_UUID_KEY).equals(getHistoryUUID()) : "commit point history uuid [" + + userData.get(Engine.HISTORY_UUID_KEY) + "] is different than engine [" + getHistoryUUID() + "]"; + return true; } private boolean assertMaxUnsafeAutoIdInCommit() throws IOException { @@ -2185,8 +2194,9 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl mapperService.indexAnalyzer(), similarityService.similarity(mapperService), codecService, shardEventListener, indexCache.query(), cachingPolicy, forceNewHistoryUUID, translogConfig, IndexingMemoryController.SHARD_INACTIVE_TIME_SETTING.get(indexSettings.getSettings()), - Arrays.asList(refreshListeners, new RefreshMetricUpdater(refreshMetric)), indexSort, - this::runTranslogRecovery, circuitBreakerService); + Collections.singletonList(refreshListeners), + Collections.singletonList(new RefreshMetricUpdater(refreshMetric)), + indexSort, this::runTranslogRecovery, circuitBreakerService); } /** diff --git 
a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index 495f1dc4bdb..d527fa83501 100644 --- a/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/core/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -217,7 +217,13 @@ public class TermVectorsService { if (perFieldAnalyzer != null && perFieldAnalyzer.containsKey(field)) { analyzer = mapperService.getIndexAnalyzers().get(perFieldAnalyzer.get(field).toString()); } else { - analyzer = mapperService.fullName(field).indexAnalyzer(); + MappedFieldType fieldType = mapperService.fullName(field); + if (fieldType instanceof KeywordFieldMapper.KeywordFieldType) { + KeywordFieldMapper.KeywordFieldType keywordFieldType = (KeywordFieldMapper.KeywordFieldType) fieldType; + analyzer = keywordFieldType.normalizer() == null ? keywordFieldType.indexAnalyzer() : keywordFieldType.normalizer(); + } else { + analyzer = fieldType.indexAnalyzer(); + } } if (analyzer == null) { analyzer = mapperService.getIndexAnalyzers().getDefaultIndexAnalyzer(); diff --git a/core/src/main/java/org/elasticsearch/index/translog/CountedBitSet.java b/core/src/main/java/org/elasticsearch/index/translog/CountedBitSet.java index 9fac230c9a8..ca1ae279a99 100644 --- a/core/src/main/java/org/elasticsearch/index/translog/CountedBitSet.java +++ b/core/src/main/java/org/elasticsearch/index/translog/CountedBitSet.java @@ -21,6 +21,7 @@ package org.elasticsearch.index.translog; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.FixedBitSet; +import org.apache.lucene.util.RamUsageEstimator; /** * A {@link CountedBitSet} wraps a {@link FixedBitSet} but automatically releases the internal bitset @@ -28,11 +29,14 @@ import org.apache.lucene.util.FixedBitSet; * from translog as these numbers are likely to form contiguous ranges (eg. filling all bits). 
*/ final class CountedBitSet extends BitSet { + static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CountedBitSet.class); private short onBits; // Number of bits are set. private FixedBitSet bitset; CountedBitSet(short numBits) { - assert numBits > 0; + if (numBits <= 0) { + throw new IllegalArgumentException("Number of bits must be positive. Given [" + numBits + "]"); + } this.onBits = 0; this.bitset = new FixedBitSet(numBits); } @@ -41,7 +45,6 @@ final class CountedBitSet extends BitSet { public boolean get(int index) { assert 0 <= index && index < this.length(); assert bitset == null || onBits < bitset.length() : "Bitset should be released when all bits are set"; - return bitset == null ? true : bitset.get(index); } @@ -52,7 +55,7 @@ final class CountedBitSet extends BitSet { // Ignore set when bitset is full. if (bitset != null) { - boolean wasOn = bitset.getAndSet(index); + final boolean wasOn = bitset.getAndSet(index); if (wasOn == false) { onBits++; // Once all bits are set, we can simply just return YES for all indexes. 
@@ -66,12 +69,12 @@ final class CountedBitSet extends BitSet { @Override public void clear(int startIndex, int endIndex) { - throw new UnsupportedOperationException("Not implemented yet"); + throw new UnsupportedOperationException(); } @Override public void clear(int index) { - throw new UnsupportedOperationException("Not implemented yet"); + throw new UnsupportedOperationException(); } @Override @@ -86,20 +89,19 @@ final class CountedBitSet extends BitSet { @Override public int prevSetBit(int index) { - throw new UnsupportedOperationException("Not implemented yet"); + throw new UnsupportedOperationException(); } @Override public int nextSetBit(int index) { - throw new UnsupportedOperationException("Not implemented yet"); + throw new UnsupportedOperationException(); } @Override public long ramBytesUsed() { - throw new UnsupportedOperationException("Not implemented yet"); + return BASE_RAM_BYTES_USED + (bitset == null ? 0 : bitset.ramBytesUsed()); } - // Exposed for testing boolean isInternalBitsetReleased() { return bitset == null; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java index e446ec7e6d3..f1e52409943 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesModule.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesModule.java @@ -24,7 +24,6 @@ import org.elasticsearch.action.admin.indices.rollover.MaxAgeCondition; import org.elasticsearch.action.admin.indices.rollover.MaxDocsCondition; import org.elasticsearch.action.admin.indices.rollover.MaxSizeCondition; import org.elasticsearch.action.resync.TransportResyncReplicationAction; -import org.elasticsearch.index.shard.PrimaryReplicaSyncer; import org.elasticsearch.common.geo.ShapesAvailability; import org.elasticsearch.common.inject.AbstractModule; import org.elasticsearch.common.io.stream.NamedWriteableRegistry.Entry; @@ -33,12 +32,12 @@ import 
org.elasticsearch.index.mapper.BooleanFieldMapper; import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.FieldNamesFieldMapper; +import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.GeoShapeFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.IpFieldMapper; import org.elasticsearch.index.mapper.KeywordFieldMapper; -import org.elasticsearch.index.mapper.GeoPointFieldMapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -52,6 +51,7 @@ import org.elasticsearch.index.mapper.TypeFieldMapper; import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; +import org.elasticsearch.index.shard.PrimaryReplicaSyncer; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.flush.SyncedFlushService; import org.elasticsearch.indices.mapper.MapperRegistry; @@ -64,6 +64,9 @@ import java.util.Collections; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; /** * Configures classes and services that are shared by indices on each node. 
@@ -73,7 +76,8 @@ public class IndicesModule extends AbstractModule { private final MapperRegistry mapperRegistry; public IndicesModule(List mapperPlugins) { - this.mapperRegistry = new MapperRegistry(getMappers(mapperPlugins), getMetadataMappers(mapperPlugins)); + this.mapperRegistry = new MapperRegistry(getMappers(mapperPlugins), getMetadataMappers(mapperPlugins), + getFieldFilter(mapperPlugins)); registerBuiltinWritables(); } @@ -118,23 +122,42 @@ public class IndicesModule extends AbstractModule { return Collections.unmodifiableMap(mappers); } - private Map getMetadataMappers(List mapperPlugins) { + private static final Map builtInMetadataMappers = initBuiltInMetadataMappers(); + + private static Map initBuiltInMetadataMappers() { + Map builtInMetadataMappers; // Use a LinkedHashMap for metadataMappers because iteration order matters + builtInMetadataMappers = new LinkedHashMap<>(); + // UID first so it will be the first stored field to load (so will benefit from "fields: []" early termination + builtInMetadataMappers.put(UidFieldMapper.NAME, new UidFieldMapper.TypeParser()); + builtInMetadataMappers.put(IdFieldMapper.NAME, new IdFieldMapper.TypeParser()); + builtInMetadataMappers.put(RoutingFieldMapper.NAME, new RoutingFieldMapper.TypeParser()); + builtInMetadataMappers.put(IndexFieldMapper.NAME, new IndexFieldMapper.TypeParser()); + builtInMetadataMappers.put(SourceFieldMapper.NAME, new SourceFieldMapper.TypeParser()); + builtInMetadataMappers.put(TypeFieldMapper.NAME, new TypeFieldMapper.TypeParser()); + builtInMetadataMappers.put(VersionFieldMapper.NAME, new VersionFieldMapper.TypeParser()); + builtInMetadataMappers.put(ParentFieldMapper.NAME, new ParentFieldMapper.TypeParser()); + builtInMetadataMappers.put(SeqNoFieldMapper.NAME, new SeqNoFieldMapper.TypeParser()); + //_field_names must be added last so that it has a chance to see all the other mappers + builtInMetadataMappers.put(FieldNamesFieldMapper.NAME, new FieldNamesFieldMapper.TypeParser()); + 
return Collections.unmodifiableMap(builtInMetadataMappers); + } + + private static Map getMetadataMappers(List mapperPlugins) { Map metadataMappers = new LinkedHashMap<>(); - // builtin metadata mappers - // UID first so it will be the first stored field to load (so will benefit from "fields: []" early termination - - metadataMappers.put(UidFieldMapper.NAME, new UidFieldMapper.TypeParser()); - metadataMappers.put(IdFieldMapper.NAME, new IdFieldMapper.TypeParser()); - metadataMappers.put(RoutingFieldMapper.NAME, new RoutingFieldMapper.TypeParser()); - metadataMappers.put(IndexFieldMapper.NAME, new IndexFieldMapper.TypeParser()); - metadataMappers.put(SourceFieldMapper.NAME, new SourceFieldMapper.TypeParser()); - metadataMappers.put(TypeFieldMapper.NAME, new TypeFieldMapper.TypeParser()); - metadataMappers.put(VersionFieldMapper.NAME, new VersionFieldMapper.TypeParser()); - metadataMappers.put(ParentFieldMapper.NAME, new ParentFieldMapper.TypeParser()); - metadataMappers.put(SeqNoFieldMapper.NAME, new SeqNoFieldMapper.TypeParser()); - // _field_names is not registered here, see below + int i = 0; + Map.Entry fieldNamesEntry = null; + for (Map.Entry entry : builtInMetadataMappers.entrySet()) { + if (i < builtInMetadataMappers.size() - 1) { + metadataMappers.put(entry.getKey(), entry.getValue()); + } else { + assert entry.getKey().equals(FieldNamesFieldMapper.NAME) : "_field_names must be the last registered mapper, order counts"; + fieldNamesEntry = entry; + } + i++; + } + assert fieldNamesEntry != null; for (MapperPlugin mapperPlugin : mapperPlugins) { for (Map.Entry entry : mapperPlugin.getMetadataMappers().entrySet()) { @@ -147,11 +170,49 @@ public class IndicesModule extends AbstractModule { } } - // we register _field_names here so that it has a chance to see all other mappers, including from plugins - metadataMappers.put(FieldNamesFieldMapper.NAME, new FieldNamesFieldMapper.TypeParser()); + // we register _field_names here so that it has a chance to see all the 
other mappers, including from plugins + metadataMappers.put(fieldNamesEntry.getKey(), fieldNamesEntry.getValue()); return Collections.unmodifiableMap(metadataMappers); } + /** + * Returns a set containing all of the builtin metadata fields + */ + public static Set getBuiltInMetaDataFields() { + return builtInMetadataMappers.keySet(); + } + + private static Function> getFieldFilter(List mapperPlugins) { + Function> fieldFilter = MapperPlugin.NOOP_FIELD_FILTER; + for (MapperPlugin mapperPlugin : mapperPlugins) { + fieldFilter = and(fieldFilter, mapperPlugin.getFieldFilter()); + } + return fieldFilter; + } + + private static Function> and(Function> first, + Function> second) { + //the purpose of this method is to not chain no-op field predicates, so that we can easily find out when no plugins plug in + //a field filter, hence skip the mappings filtering part as a whole, as it requires parsing mappings into a map. + if (first == MapperPlugin.NOOP_FIELD_FILTER) { + return second; + } + if (second == MapperPlugin.NOOP_FIELD_FILTER) { + return first; + } + return index -> { + Predicate firstPredicate = first.apply(index); + Predicate secondPredicate = second.apply(index); + if (firstPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + return secondPredicate; + } + if (secondPredicate == MapperPlugin.NOOP_FIELD_PREDICATE) { + return firstPredicate; + } + return firstPredicate.and(secondPredicate); + }; + } + @Override protected void configure() { bind(IndicesStore.class).asEagerSingleton(); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index e2c66260a39..e6f3007a799 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -127,7 +127,9 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import 
java.util.function.Consumer; +import java.util.function.Function; import java.util.function.LongSupplier; +import java.util.function.Predicate; import java.util.function.Supplier; import java.util.stream.Collectors; @@ -1262,4 +1264,22 @@ public class IndicesService extends AbstractLifecycleComponent } } } + + /** + * Returns a function which given an index name, returns a predicate which fields must match in order to be returned by get mappings, + * get index, get field mappings and field capabilities API. Useful to filter the fields that such API return. + * The predicate receives the field name as input argument. In case multiple plugins register a field filter through + * {@link org.elasticsearch.plugins.MapperPlugin#getFieldFilter()}, only fields that match all the registered filters will be + * returned by get mappings, get index, get field mappings and field capabilities API. + */ + public Function> getFieldFilter() { + return mapperRegistry.getFieldFilter(); + } + + /** + * Returns true if the provided field is a registered metadata field (including ones registered via plugins), false otherwise. + */ + public boolean isMetaDataField(String field) { + return mapperRegistry.isMetaDataField(field); + } } diff --git a/core/src/main/java/org/elasticsearch/indices/mapper/MapperRegistry.java b/core/src/main/java/org/elasticsearch/indices/mapper/MapperRegistry.java index bcc4c09d3dd..41d563c2037 100644 --- a/core/src/main/java/org/elasticsearch/indices/mapper/MapperRegistry.java +++ b/core/src/main/java/org/elasticsearch/indices/mapper/MapperRegistry.java @@ -21,10 +21,13 @@ package org.elasticsearch.indices.mapper; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.plugins.MapperPlugin; import java.util.Collections; import java.util.LinkedHashMap; import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; /** * A registry for all field mappers.
@@ -33,11 +36,14 @@ public final class MapperRegistry { private final Map mapperParsers; private final Map metadataMapperParsers; + private final Function> fieldFilter; + public MapperRegistry(Map mapperParsers, - Map metadataMapperParsers) { + Map metadataMapperParsers, Function> fieldFilter) { this.mapperParsers = Collections.unmodifiableMap(new LinkedHashMap<>(mapperParsers)); this.metadataMapperParsers = Collections.unmodifiableMap(new LinkedHashMap<>(metadataMapperParsers)); + this.fieldFilter = fieldFilter; } /** @@ -55,4 +61,22 @@ public final class MapperRegistry { public Map getMetadataMapperParsers() { return metadataMapperParsers; } + + /** + * Returns true if the provided field is a registered metadata field, false otherwise + */ + public boolean isMetaDataField(String field) { + return getMetadataMapperParsers().containsKey(field); + } + + /** + * Returns a function that given an index name, returns a predicate that fields must match in order to be returned by get mappings, + * get index, get field mappings and field capabilities API. Useful to filter the fields that such API return. + * The predicate receives the field name as input argument. In case multiple plugins register a field filter through + * {@link MapperPlugin#getFieldFilter()}, only fields that match all the registered filters will be returned by get mappings, + * get index, get field mappings and field capabilities API. 
+ */ + public Function> getFieldFilter() { + return fieldFilter; + } } diff --git a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java index 71ad21c14d7..a847088869b 100644 --- a/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java +++ b/core/src/main/java/org/elasticsearch/indices/recovery/RecoverySourceHandler.java @@ -149,12 +149,13 @@ public class RecoverySourceHandler { final Translog translog = shard.getTranslog(); final long startingSeqNo; + final long requiredSeqNoRangeStart; final boolean isSequenceNumberBasedRecoveryPossible = request.startingSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO && isTargetSameHistory() && isTranslogReadyForSequenceNumberBasedRecovery(); - if (isSequenceNumberBasedRecoveryPossible) { logger.trace("performing sequence numbers based recovery. starting at [{}]", request.startingSeqNo()); startingSeqNo = request.startingSeqNo(); + requiredSeqNoRangeStart = startingSeqNo; } else { final Engine.IndexCommitRef phase1Snapshot; try { @@ -162,10 +163,12 @@ public class RecoverySourceHandler { } catch (final Exception e) { throw new RecoveryEngineException(shard.shardId(), 1, "snapshot failed", e); } - // we set this to unassigned to create a translog roughly according to the retention policy - // on the target - startingSeqNo = SequenceNumbers.UNASSIGNED_SEQ_NO; - + // we set this to 0 to create a translog roughly according to the retention policy + // on the target. 
Note that it will still filter out legacy operations with no sequence numbers + startingSeqNo = 0; + // but we must have everything above the local checkpoint in the commit + requiredSeqNoRangeStart = + Long.parseLong(phase1Snapshot.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) + 1; try { phase1(phase1Snapshot.getIndexCommit(), translog::totalOperations); } catch (final Exception e) { @@ -178,6 +181,9 @@ public class RecoverySourceHandler { } } } + assert startingSeqNo >= 0 : "startingSeqNo must be non negative. got: " + startingSeqNo; + assert requiredSeqNoRangeStart >= startingSeqNo : "requiredSeqNoRangeStart [" + requiredSeqNoRangeStart + "] is lower than [" + + startingSeqNo + "]"; runUnderPrimaryPermit(() -> shard.initiateTracking(request.targetAllocationId())); @@ -187,10 +193,19 @@ public class RecoverySourceHandler { throw new RecoveryEngineException(shard.shardId(), 1, "prepare target for translog failed", e); } + final long endingSeqNo = shard.seqNoStats().getMaxSeqNo(); + /* + * We need to wait for all operations up to the current max to complete, otherwise we can not guarantee that all + * operations in the required range will be available for replaying from the translog of the source. 
+ */ + cancellableThreads.execute(() -> shard.waitForOpsToComplete(endingSeqNo)); + + logger.trace("all operations up to [{}] completed, which will be used as an ending sequence number", endingSeqNo); + logger.trace("snapshot translog for recovery; current size is [{}]", translog.estimateTotalOperationsFromMinSeq(startingSeqNo)); final long targetLocalCheckpoint; try(Translog.Snapshot snapshot = translog.newSnapshotFromMinSeqNo(startingSeqNo)) { - targetLocalCheckpoint = phase2(startingSeqNo, snapshot); + targetLocalCheckpoint = phase2(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot); } catch (Exception e) { throw new RecoveryEngineException(shard.shardId(), 2, "phase2 failed", e); } @@ -224,7 +239,8 @@ public class RecoverySourceHandler { /** * Determines if the source translog is ready for a sequence-number-based peer recovery. The main condition here is that the source - * translog contains all operations between the local checkpoint on the target and the current maximum sequence number on the source. + * translog contains all operations above the local checkpoint on the target. We already know that the translog contains or will contain + * all ops above the source local checkpoint, so we can stop checking there. 
* * @return {@code true} if the source is ready for a sequence-number-based recovery * @throws IOException if an I/O exception occurred reading the translog snapshot @@ -232,18 +248,10 @@ public class RecoverySourceHandler { boolean isTranslogReadyForSequenceNumberBasedRecovery() throws IOException { final long startingSeqNo = request.startingSeqNo(); assert startingSeqNo >= 0; - final long endingSeqNo = shard.seqNoStats().getMaxSeqNo(); - logger.trace("testing sequence numbers in range: [{}, {}]", startingSeqNo, endingSeqNo); + final long localCheckpoint = shard.getLocalCheckpoint(); + logger.trace("testing sequence numbers in range: [{}, {}]", startingSeqNo, localCheckpoint); // the start recovery request is initialized with the starting sequence number set to the target shard's local checkpoint plus one - if (startingSeqNo - 1 <= endingSeqNo) { - /* - * We need to wait for all operations up to the current max to complete, otherwise we can not guarantee that all - * operations in the required range will be available for replaying from the translog of the source. - */ - cancellableThreads.execute(() -> shard.waitForOpsToComplete(endingSeqNo)); - - logger.trace("all operations up to [{}] completed, checking translog content", endingSeqNo); - + if (startingSeqNo - 1 <= localCheckpoint) { final LocalCheckpointTracker tracker = new LocalCheckpointTracker(startingSeqNo, startingSeqNo - 1); try (Translog.Snapshot snapshot = shard.getTranslog().newSnapshotFromMinSeqNo(startingSeqNo)) { Translog.Operation operation; @@ -253,7 +261,7 @@ public class RecoverySourceHandler { } } } - return tracker.getCheckpoint() >= endingSeqNo; + return tracker.getCheckpoint() >= localCheckpoint; } else { return false; } @@ -433,13 +441,15 @@ public class RecoverySourceHandler { * point-in-time view of the translog). It then sends each translog operation to the target node so it can be replayed into the new * shard. 
* - * @param startingSeqNo the sequence number to start recovery from, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO} if all - * ops should be sent - * @param snapshot a snapshot of the translog - * + * @param startingSeqNo the sequence number to start recovery from, or {@link SequenceNumbers#UNASSIGNED_SEQ_NO} if all + * ops should be sent + * @param requiredSeqNoRangeStart the lower sequence number of the required range (ending with endingSeqNo) + * @param endingSeqNo the highest sequence number that should be sent + * @param snapshot a snapshot of the translog * @return the local checkpoint on the target */ - long phase2(final long startingSeqNo, final Translog.Snapshot snapshot) throws IOException { + long phase2(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, final Translog.Snapshot snapshot) + throws IOException { if (shard.state() == IndexShardState.CLOSED) { throw new IndexShardClosedException(request.shardId()); } @@ -447,10 +457,11 @@ public class RecoverySourceHandler { final StopWatch stopWatch = new StopWatch().start(); - logger.trace("recovery [phase2]: sending transaction log operations"); + logger.trace("recovery [phase2]: sending transaction log operations (seq# from [" + startingSeqNo + "], " + + "required [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "]"); // send all the snapshot's translog operations to the target - final SendSnapshotResult result = sendSnapshot(startingSeqNo, snapshot); + final SendSnapshotResult result = sendSnapshot(startingSeqNo, requiredSeqNoRangeStart, endingSeqNo, snapshot); stopWatch.stop(); logger.trace("recovery [phase2]: took [{}]", stopWatch.totalTime()); @@ -511,18 +522,26 @@ public class RecoverySourceHandler { *

* Operations are bulked into a single request depending on an operation count limit or size-in-bytes limit. * - * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent - * @param snapshot the translog snapshot to replay operations from - * @return the local checkpoint on the target and the total number of operations sent + * @param startingSeqNo the sequence number for which only operations with a sequence number greater than this will be sent + * @param requiredSeqNoRangeStart the lower sequence number of the required range + * @param endingSeqNo the upper bound of the sequence number range to be sent (inclusive) + * @param snapshot the translog snapshot to replay operations from @return the local checkpoint on the target and the + * total number of operations sent * @throws IOException if an I/O exception occurred reading the translog snapshot */ - protected SendSnapshotResult sendSnapshot(final long startingSeqNo, final Translog.Snapshot snapshot) throws IOException { + protected SendSnapshotResult sendSnapshot(final long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, + final Translog.Snapshot snapshot) throws IOException { + assert requiredSeqNoRangeStart <= endingSeqNo + 1: + "requiredSeqNoRangeStart " + requiredSeqNoRangeStart + " is larger than endingSeqNo " + endingSeqNo; + assert startingSeqNo <= requiredSeqNoRangeStart : + "startingSeqNo " + startingSeqNo + " is larger than requiredSeqNoRangeStart " + requiredSeqNoRangeStart; int ops = 0; long size = 0; int skippedOps = 0; int totalSentOps = 0; final AtomicLong targetLocalCheckpoint = new AtomicLong(SequenceNumbers.UNASSIGNED_SEQ_NO); final List operations = new ArrayList<>(); + final LocalCheckpointTracker requiredOpsTracker = new LocalCheckpointTracker(endingSeqNo, requiredSeqNoRangeStart - 1); final int expectedTotalOps = snapshot.totalOperations(); if (expectedTotalOps == 0) { @@ -539,12 +558,9 @@ public class 
RecoverySourceHandler { throw new IndexShardClosedException(request.shardId()); } cancellableThreads.checkForCancel(); - /* - * If we are doing a sequence-number-based recovery, we have to skip older ops for which no sequence number was assigned, and - * any ops before the starting sequence number. - */ + final long seqNo = operation.seqNo(); - if (startingSeqNo >= 0 && (seqNo == SequenceNumbers.UNASSIGNED_SEQ_NO || seqNo < startingSeqNo)) { + if (seqNo < startingSeqNo || seqNo > endingSeqNo) { skippedOps++; continue; } @@ -552,6 +568,7 @@ public class RecoverySourceHandler { ops++; size += operation.estimateSize(); totalSentOps++; + requiredOpsTracker.markSeqNoAsCompleted(seqNo); // check if this request is past bytes threshold, and if so, send it off if (size >= chunkSizeInBytes) { @@ -569,8 +586,14 @@ public class RecoverySourceHandler { } assert expectedTotalOps == snapshot.overriddenOperations() + skippedOps + totalSentOps - : String.format(Locale.ROOT, "expected total [%d], overridden [%d], skipped [%d], total sent [%d]", - expectedTotalOps, snapshot.overriddenOperations(), skippedOps, totalSentOps); + : String.format(Locale.ROOT, "expected total [%d], overridden [%d], skipped [%d], total sent [%d]", + expectedTotalOps, snapshot.overriddenOperations(), skippedOps, totalSentOps); + + if (requiredOpsTracker.getCheckpoint() < endingSeqNo) { + throw new IllegalStateException("translog replay failed to cover required sequence numbers" + + " (required range [" + requiredSeqNoRangeStart + ":" + endingSeqNo + "). 
first missing op is [" + + (requiredOpsTracker.getCheckpoint() + 1) + "]"); + } logger.trace("sent final batch of [{}][{}] (total: [{}]) translog operations", ops, new ByteSizeValue(size), expectedTotalOps); diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index fee6d76ca3d..a4b7e5147d5 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -100,7 +100,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; -import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.indices.recovery.PeerRecoverySourceService; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoverySettings; @@ -449,6 +448,11 @@ public class Node implements Closeable { transportService, indicesService, pluginsService, circuitBreakerService, scriptModule.getScriptService(), httpServerTransport, ingestService, clusterService, settingsModule.getSettingsFilter(), responseCollectorService, searchTransportService); + + final SearchService searchService = newSearchService(clusterService, indicesService, + threadPool, scriptModule.getScriptService(), bigArrays, searchModule.getFetchPhase(), + responseCollectorService); + modules.add(b -> { b.bind(Node.class).toInstance(this); b.bind(NodeService.class).toInstance(nodeService); @@ -470,12 +474,10 @@ public class Node implements Closeable { b.bind(MetaDataUpgrader.class).toInstance(metaDataUpgrader); b.bind(MetaStateService.class).toInstance(metaStateService); b.bind(IndicesService.class).toInstance(indicesService); - b.bind(SearchService.class).toInstance(newSearchService(clusterService, indicesService, - threadPool, 
scriptModule.getScriptService(), bigArrays, searchModule.getFetchPhase(), - responseCollectorService)); + b.bind(SearchService.class).toInstance(searchService); b.bind(SearchTransportService.class).toInstance(searchTransportService); - b.bind(SearchPhaseController.class).toInstance(new SearchPhaseController(settings, bigArrays, - scriptModule.getScriptService())); + b.bind(SearchPhaseController.class).toInstance(new SearchPhaseController(settings, + searchService::createReduceContext)); b.bind(Transport.class).toInstance(transport); b.bind(TransportService.class).toInstance(transportService); b.bind(NetworkService.class).toInstance(networkService); diff --git a/core/src/main/java/org/elasticsearch/plugins/MapperPlugin.java b/core/src/main/java/org/elasticsearch/plugins/MapperPlugin.java index 5dfcdc6bda4..5edf994b32e 100644 --- a/core/src/main/java/org/elasticsearch/plugins/MapperPlugin.java +++ b/core/src/main/java/org/elasticsearch/plugins/MapperPlugin.java @@ -19,12 +19,14 @@ package org.elasticsearch.plugins; -import java.util.Collections; -import java.util.Map; - import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MetadataFieldMapper; +import java.util.Collections; +import java.util.Map; +import java.util.function.Function; +import java.util.function.Predicate; + /** * An extension point for {@link Plugin} implementations to add custom mappers */ @@ -32,7 +34,7 @@ public interface MapperPlugin { /** * Returns additional mapper implementations added by this plugin. - * + *

* The key of the returned {@link Map} is the unique name for the mapper which will be used * as the mapping {@code type}, and the value is a {@link Mapper.TypeParser} to parse the * mapper settings into a {@link Mapper}. @@ -43,7 +45,7 @@ public interface MapperPlugin { /** * Returns additional metadata mapper implementations added by this plugin. - * + *

* The key of the returned {@link Map} is the unique name for the metadata mapper, which * is used in the mapping json to configure the metadata mapper, and the value is a * {@link MetadataFieldMapper.TypeParser} to parse the mapper settings into a @@ -52,4 +54,25 @@ public interface MapperPlugin { default Map getMetadataMappers() { return Collections.emptyMap(); } + + /** + * Returns a function that given an index name returns a predicate which fields must match in order to be returned by get mappings, + * get index, get field mappings and field capabilities API. Useful to filter the fields that such API return. The predicate receives + * the field name as input argument and should return true to show the field and false to hide it. + */ + default Function> getFieldFilter() { + return NOOP_FIELD_FILTER; + } + + /** + * The default field predicate applied, which doesn't filter anything. That means that by default get mappings, get index + * get field mappings and field capabilities API will return every field that's present in the mappings. + */ + Predicate NOOP_FIELD_PREDICATE = field -> true; + + /** + * The default field filter applied, which doesn't filter anything. That means that by default get mappings, get index + * get field mappings and field capabilities API will return every field that's present in the mappings. 
+ */ + Function> NOOP_FIELD_FILTER = index -> NOOP_FIELD_PREDICATE; } diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 7c2cc4b33d4..7b2c375a1ee 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; @@ -93,12 +94,8 @@ public class RestMultiSearchAction extends BaseRestHandler { parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, parser) -> { - try { - searchRequest.source(SearchSourceBuilder.fromXContent(parser)); - multiRequest.add(searchRequest); - } catch (IOException e) { - throw new ElasticsearchParseException("Exception when parsing search request", e); - } + searchRequest.source(SearchSourceBuilder.fromXContent(parser)); + multiRequest.add(searchRequest); }); List requests = multiRequest.requests(); preFilterShardSize = Math.max(1, preFilterShardSize / (requests.size()+1)); @@ -113,7 +110,7 @@ public class RestMultiSearchAction extends BaseRestHandler { * Parses a multi-line {@link RestRequest} body, instantiating a {@link SearchRequest} for each line and applying the given consumer. 
*/ public static void parseMultiLineRequest(RestRequest request, IndicesOptions indicesOptions, boolean allowExplicitIndex, - BiConsumer consumer) throws IOException { + CheckedBiConsumer consumer) throws IOException { String[] indices = Strings.splitStringByCommaToArray(request.param("index")); String[] types = Strings.splitStringByCommaToArray(request.param("type")); @@ -123,83 +120,8 @@ public class RestMultiSearchAction extends BaseRestHandler { final Tuple sourceTuple = request.contentOrSourceParam(); final XContent xContent = sourceTuple.v1().xContent(); final BytesReference data = sourceTuple.v2(); - - int from = 0; - int length = data.length(); - byte marker = xContent.streamSeparator(); - while (true) { - int nextMarker = findNextMarker(marker, from, data, length); - if (nextMarker == -1) { - break; - } - // support first line with \n - if (nextMarker == 0) { - from = nextMarker + 1; - continue; - } - - SearchRequest searchRequest = new SearchRequest(); - if (indices != null) { - searchRequest.indices(indices); - } - if (indicesOptions != null) { - searchRequest.indicesOptions(indicesOptions); - } - if (types != null && types.length > 0) { - searchRequest.types(types); - } - if (routing != null) { - searchRequest.routing(routing); - } - if (searchType != null) { - searchRequest.searchType(searchType); - } - - IndicesOptions defaultOptions = IndicesOptions.strictExpandOpenAndForbidClosed(); - - - // now parse the action - if (nextMarker - from > 0) { - try (XContentParser parser = xContent.createParser(request.getXContentRegistry(), data.slice(from, nextMarker - from))) { - Map source = parser.map(); - for (Map.Entry entry : source.entrySet()) { - Object value = entry.getValue(); - if ("index".equals(entry.getKey()) || "indices".equals(entry.getKey())) { - if (!allowExplicitIndex) { - throw new IllegalArgumentException("explicit index in multi search is not allowed"); - } - searchRequest.indices(nodeStringArrayValue(value)); - } else if 
("type".equals(entry.getKey()) || "types".equals(entry.getKey())) { - searchRequest.types(nodeStringArrayValue(value)); - } else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) { - searchRequest.searchType(nodeStringValue(value, null)); - } else if ("request_cache".equals(entry.getKey()) || "requestCache".equals(entry.getKey())) { - searchRequest.requestCache(nodeBooleanValue(value, entry.getKey())); - } else if ("preference".equals(entry.getKey())) { - searchRequest.preference(nodeStringValue(value, null)); - } else if ("routing".equals(entry.getKey())) { - searchRequest.routing(nodeStringValue(value, null)); - } - } - defaultOptions = IndicesOptions.fromMap(source, defaultOptions); - } - } - searchRequest.indicesOptions(defaultOptions); - - // move pointers - from = nextMarker + 1; - // now for the body - nextMarker = findNextMarker(marker, from, data, length); - if (nextMarker == -1) { - break; - } - BytesReference bytes = data.slice(from, nextMarker - from); - try (XContentParser parser = xContent.createParser(request.getXContentRegistry(), bytes)) { - consumer.accept(searchRequest, parser); - } - // move pointers - from = nextMarker + 1; - } + MultiSearchRequest.readMultiLineFormat(data, xContent, consumer, indices, indicesOptions, types, routing, + searchType, request.getXContentRegistry(), allowExplicitIndex); } @Override @@ -207,18 +129,6 @@ public class RestMultiSearchAction extends BaseRestHandler { return true; } - private static int findNextMarker(byte marker, int from, BytesReference data, int length) { - for (int i = from; i < length; i++) { - if (data.get(i) == marker) { - return i; - } - } - if (from != length) { - throw new IllegalArgumentException("The msearch request must be terminated by a newline [\n]"); - } - return -1; - } - @Override protected Set responseParams() { return RESPONSE_PARAMS; diff --git a/core/src/main/java/org/elasticsearch/search/SearchService.java 
b/core/src/main/java/org/elasticsearch/search/SearchService.java index 117a979639b..9ac83276000 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchService.java +++ b/core/src/main/java/org/elasticsearch/search/SearchService.java @@ -60,6 +60,8 @@ import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.AggregationInitializationException; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; import org.elasticsearch.search.aggregations.SearchContextAggregations; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.collapse.CollapseContext; @@ -118,6 +120,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv Setting.positiveTimeSetting("search.max_keep_alive", timeValueHours(24), Property.NodeScope, Property.Dynamic); public static final Setting KEEPALIVE_INTERVAL_SETTING = Setting.positiveTimeSetting("search.keep_alive_interval", timeValueMinutes(1), Property.NodeScope); + /** * Enables low-level, frequent search cancellation checks. Enabling low-level checks will make long running searches to react * to the cancellation request faster. 
However, since it will produce more cancellation checks it might slow the search performance @@ -163,6 +166,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private final ConcurrentMapLong activeContexts = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency(); + private final MultiBucketConsumerService multiBucketConsumerService; + public SearchService(ClusterService clusterService, IndicesService indicesService, ThreadPool threadPool, ScriptService scriptService, BigArrays bigArrays, FetchPhase fetchPhase, ResponseCollectorService responseCollectorService) { @@ -175,6 +180,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv this.bigArrays = bigArrays; this.queryPhase = new QueryPhase(settings); this.fetchPhase = fetchPhase; + this.multiBucketConsumerService = new MultiBucketConsumerService(clusterService, settings); TimeValue keepAliveInterval = KEEPALIVE_INTERVAL_SETTING.get(settings); setKeepAlives(DEFAULT_KEEPALIVE_SETTING.get(settings), MAX_KEEPALIVE_SETTING.get(settings)); @@ -741,7 +747,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv if (source.aggregations() != null) { try { AggregatorFactories factories = source.aggregations().build(context, null); - context.aggregations(new SearchContextAggregations(factories)); + context.aggregations(new SearchContextAggregations(factories, multiBucketConsumerService.create())); } catch (IOException e) { throw new AggregationInitializationException("Failed to create aggregators", e); } @@ -1017,4 +1023,8 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv public IndicesService getIndicesService() { return indicesService; } + + public InternalAggregation.ReduceContext createReduceContext(boolean finalReduce) { + return new InternalAggregation.ReduceContext(bigArrays, scriptService, multiBucketConsumerService.create(), finalReduce); + } } diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index 9b4a02cd816..4dc765d0db1 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -123,6 +123,7 @@ public class AggregationPhase implements SearchPhase { } List aggregations = new ArrayList<>(aggregators.length); + context.aggregations().resetBucketMultiConsumer(); for (Aggregator aggregator : context.aggregations().aggregators()) { try { aggregator.postCollection(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java index 4c886d3a9be..7f6e74e68b2 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalAggregation.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.util.BigArray; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -33,6 +34,7 @@ import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.function.IntConsumer; /** * An internal implementation of {@link Aggregation}. Serves as a base class for all aggregation implementations. 
@@ -43,11 +45,17 @@ public abstract class InternalAggregation implements Aggregation, NamedWriteable private final BigArrays bigArrays; private final ScriptService scriptService; + private final IntConsumer multiBucketConsumer; private final boolean isFinalReduce; public ReduceContext(BigArrays bigArrays, ScriptService scriptService, boolean isFinalReduce) { + this(bigArrays, scriptService, (s) -> {}, isFinalReduce); + } + + public ReduceContext(BigArrays bigArrays, ScriptService scriptService, IntConsumer multiBucketConsumer, boolean isFinalReduce) { this.bigArrays = bigArrays; this.scriptService = scriptService; + this.multiBucketConsumer = multiBucketConsumer; this.isFinalReduce = isFinalReduce; } @@ -67,6 +75,14 @@ public abstract class InternalAggregation implements Aggregation, NamedWriteable public ScriptService scriptService() { return scriptService; } + + /** + * Adds count buckets to the global count for the request and fails if this number is greater than + * the maximum number of buckets allowed in a response + */ + public void consumeBucketsAndMaybeBreak(int size) { + multiBucketConsumer.accept(size); + } } protected final String name; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java b/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java index 8e8f4edcf31..9084f415d77 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/InternalMultiBucketAggregation.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.SingleBucketAggregation; import 
org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.io.IOException; @@ -82,6 +83,39 @@ public abstract class InternalMultiBucketAggregation path) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java b/core/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java new file mode 100644 index 00000000000..63ba70c6f23 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/MultiBucketConsumerService.java @@ -0,0 +1,126 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.aggregations; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; + +import java.io.IOException; +import java.util.function.IntConsumer; + +/** + * An aggregation service that creates instances of {@link MultiBucketConsumer}. + * The consumer is used by {@link BucketsAggregator} and {@link InternalMultiBucketAggregation} to limit the number of buckets created + * in {@link Aggregator#buildAggregation} and {@link InternalAggregation#reduce}. + * The limit can be set by changing the `search.max_buckets` cluster setting and defaults to 10000. + */ +public class MultiBucketConsumerService { + public static final int DEFAULT_MAX_BUCKETS = 10000; + public static final Setting MAX_BUCKET_SETTING = + Setting.intSetting("search.max_buckets", DEFAULT_MAX_BUCKETS, 0, Setting.Property.NodeScope, Setting.Property.Dynamic); + + private volatile int maxBucket; + + public MultiBucketConsumerService(ClusterService clusterService, Settings settings) { + this.maxBucket = MAX_BUCKET_SETTING.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_BUCKET_SETTING, this::setMaxBucket); + } + + private void setMaxBucket(int maxBucket) { + this.maxBucket = maxBucket; + } + + public static class TooManyBucketsException extends AggregationExecutionException { + private final int maxBuckets; + + public TooManyBucketsException(String message, int maxBuckets) { + super(message); + this.maxBuckets = maxBuckets; + } + + public TooManyBucketsException(StreamInput in) throws IOException { + super(in); + maxBuckets = in.readInt(); + } + + 
@Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeInt(maxBuckets); + } + + public int getMaxBuckets() { + return maxBuckets; + } + + @Override + public RestStatus status() { + return RestStatus.SERVICE_UNAVAILABLE; + } + + @Override + protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("max_buckets", maxBuckets); + } + } + + /** + * An {@link IntConsumer} that throws a {@link TooManyBucketsException} + * when the sum of the provided values is above the limit (`search.max_buckets`). + * It is used by aggregators to limit the number of bucket creation during + * {@link Aggregator#buildAggregation} and {@link InternalAggregation#reduce}. + */ + public static class MultiBucketConsumer implements IntConsumer { + private final int limit; + // aggregations execute in a single thread so no atomic here + private int count; + + public MultiBucketConsumer(int limit) { + this.limit = limit; + } + + @Override + public void accept(int value) { + count += value; + if (count > limit) { + throw new TooManyBucketsException("Trying to create too many buckets. Must be less than or equal to: [" + limit + + "] but was [" + count + "]. 
This limit can be set by changing the [" + + MAX_BUCKET_SETTING.getKey() + "] cluster level setting.", limit); + } + } + + public void reset() { + this.count = 0; + } + + public int getCount() { + return count; + } + } + + public MultiBucketConsumer create() { + return new MultiBucketConsumer(maxBucket); + } +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java b/core/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java index 9476af03846..ab0a73d53ed 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/SearchContextAggregations.java @@ -18,19 +18,25 @@ */ package org.elasticsearch.search.aggregations; +import java.util.function.IntConsumer; + +import static org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer; + /** * The aggregation context that is part of the search context. 
*/ public class SearchContextAggregations { private final AggregatorFactories factories; + private final MultiBucketConsumer multiBucketConsumer; private Aggregator[] aggregators; /** * Creates a new aggregation context with the parsed aggregator factories */ - public SearchContextAggregations(AggregatorFactories factories) { + public SearchContextAggregations(AggregatorFactories factories, MultiBucketConsumer multiBucketConsumer) { this.factories = factories; + this.multiBucketConsumer = multiBucketConsumer; } public AggregatorFactories factories() { @@ -50,4 +56,15 @@ public class SearchContextAggregations { this.aggregators = aggregators; } + /** + * Returns a consumer for multi bucket aggregation that checks the total number of buckets + * created in the response + */ + public IntConsumer multiBucketConsumer() { + return multiBucketConsumer; + } + + void resetBucketMultiConsumer() { + multiBucketConsumer.reset(); + } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java index 546cb07af26..02cf3adf88a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java @@ -34,10 +34,12 @@ import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.function.IntConsumer; public abstract class BucketsAggregator extends AggregatorBase { private final BigArrays bigArrays; + private final IntConsumer multiBucketConsumer; private IntArray docCounts; public BucketsAggregator(String name, AggregatorFactories factories, SearchContext context, Aggregator parent, @@ -45,6 +47,11 @@ public abstract class BucketsAggregator extends AggregatorBase { super(name, factories, context, parent, pipelineAggregators, metaData); bigArrays = context.bigArrays(); docCounts = 
bigArrays.newIntArray(1, true); + if (context.aggregations() != null) { + multiBucketConsumer = context.aggregations().multiBucketConsumer(); + } else { + multiBucketConsumer = (count) -> {}; + } } /** @@ -104,6 +111,14 @@ public abstract class BucketsAggregator extends AggregatorBase { } } + /** + * Adds count buckets to the global count for the request and fails if this number is greater than + * the maximum number of buckets allowed in a response + */ + protected final void consumeBucketsAndMaybeBreak(int count) { + multiBucketConsumer.accept(count); + } + /** * Required method to build the child aggregations of the given bucket (identified by the bucket ordinal). */ diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java index dd1e89897af..71314d1f539 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/AdjacencyMatrixAggregator.java @@ -210,6 +210,7 @@ public class AdjacencyMatrixAggregator extends BucketsAggregator { InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket(keys[i], docCount, bucketAggregations(bucketOrd)); buckets.add(bucket); + consumeBucketsAndMaybeBreak(1); } } int pos = keys.length; @@ -223,6 +224,7 @@ public class AdjacencyMatrixAggregator extends BucketsAggregator { InternalAdjacencyMatrix.InternalBucket bucket = new InternalAdjacencyMatrix.InternalBucket(intersectKey, docCount, bucketAggregations(bucketOrd)); buckets.add(bucket); + consumeBucketsAndMaybeBreak(1); } pos++; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java 
index 602a0964ee9..8ce6304daf8 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/adjacency/InternalAdjacencyMatrix.java @@ -214,7 +214,10 @@ public class InternalAdjacencyMatrix for (List sameRangeList : bucketsMap.values()) { InternalBucket reducedBucket = sameRangeList.get(0).reduce(sameRangeList, reduceContext); if(reducedBucket.docCount >= 1){ + reduceContext.consumeBucketsAndMaybeBreak(1); reducedBuckets.add(reducedBucket); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(reducedBucket)); } } Collections.sort(reducedBuckets, Comparator.comparing(InternalBucket::getKey)); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java index c18c414abb6..9612ba2f895 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregator.java @@ -83,6 +83,7 @@ final class CompositeAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long zeroBucket) throws IOException { assert zeroBucket == 0L; + consumeBucketsAndMaybeBreak(keys.size()); // Replay all documents that contain at least one top bucket (collected during the first pass). 
grow(keys.size()+1); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java index bfeabcb9833..824250948d7 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/composite/InternalComposite.java @@ -132,6 +132,7 @@ public class InternalComposite if (lastBucket != null && bucketIt.current.compareKey(lastBucket) != 0) { InternalBucket reduceBucket = buckets.get(0).reduce(buckets, reduceContext); buckets.clear(); + reduceContext.consumeBucketsAndMaybeBreak(1); result.add(reduceBucket); if (result.size() >= size) { break; @@ -145,6 +146,7 @@ public class InternalComposite } if (buckets.size() > 0) { InternalBucket reduceBucket = buckets.get(0).reduce(buckets, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); result.add(reduceBucket); } return new InternalComposite(name, size, sourceNames, result, reverseMuls, pipelineAggregators(), metaData); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java index 97724aa8b97..80d5164a96c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/FiltersAggregator.java @@ -166,6 +166,7 @@ public class FiltersAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { + consumeBucketsAndMaybeBreak(keys.length + (showOtherBucket ? 
1 : 0)); List buckets = new ArrayList<>(keys.length); for (int i = 0; i < keys.length; i++) { long bucketOrd = bucketOrd(owningBucketOrdinal, i); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java index b43ab7714e4..e522392cf4b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/filter/InternalFilters.java @@ -223,7 +223,8 @@ public class InternalFilters extends InternalMultiBucketAggregation(bucketsList.size()), keyed, pipelineAggregators(), + reduceContext.consumeBucketsAndMaybeBreak(bucketsList.size()); + InternalFilters reduced = new InternalFilters(name, new ArrayList<>(bucketsList.size()), keyed, pipelineAggregators(), getMetaData()); for (List sameRangeList : bucketsList) { reduced.buckets.add((sameRangeList.get(0)).reduce(sameRangeList, reduceContext)); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java index ebcb7b39dba..ec54abb3340 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoHashGridAggregator.java @@ -106,6 +106,7 @@ public class GeoHashGridAggregator extends BucketsAggregator { public InternalGeoHashGrid buildAggregation(long owningBucketOrdinal) throws IOException { assert owningBucketOrdinal == 0; final int size = (int) Math.min(bucketOrds.size(), shardSize); + consumeBucketsAndMaybeBreak(size); InternalGeoHashGrid.BucketPriorityQueue ordered = new InternalGeoHashGrid.BucketPriorityQueue(size); OrdinalBucket spare = null; diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java index 20bccb68305..bc60f5945eb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/InternalGeoHashGrid.java @@ -211,7 +211,12 @@ public class InternalGeoHashGrid extends InternalMultiBucketAggregation> cursor : buckets) { List sameCellBuckets = cursor.value; - ordered.insertWithOverflow(sameCellBuckets.get(0).reduce(sameCellBuckets, reduceContext)); + Bucket removed = ordered.insertWithOverflow(sameCellBuckets.get(0).reduce(sameCellBuckets, reduceContext)); + if (removed != null) { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(removed)); + } else { + reduceContext.consumeBucketsAndMaybeBreak(1); + } } buckets.close(); Bucket[] list = new Bucket[ordered.size()]; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index f5f7877572a..8d879b88b3d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -127,6 +127,8 @@ class DateHistogramAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { assert owningBucketOrdinal == 0; + consumeBucketsAndMaybeBreak((int) bucketOrds.size()); + List buckets = new ArrayList<>((int) bucketOrds.size()); for (long i = 0; i < bucketOrds.size(); i++) { buckets.add(new InternalDateHistogram.Bucket(bucketOrds.get(i), bucketDocCount(i), keyed, formatter, bucketAggregations(i))); 
diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java index 0c2ba554c0b..4938daad65b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java @@ -131,6 +131,7 @@ class HistogramAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long bucket) throws IOException { assert bucket == 0; + consumeBucketsAndMaybeBreak((int) bucketOrds.size()); List buckets = new ArrayList<>((int) bucketOrds.size()); for (long i = 0; i < bucketOrds.size(); i++) { double roundKey = Double.longBitsToDouble(bucketOrds.get(i)); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 1981b313b9c..dfa12db0cd3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -344,7 +344,10 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< // the key changes, reduce what we already buffered and reset the buffer for current buckets final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceContext); if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); reducedBuckets.add(reduced); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(reduced)); } currentBuckets.clear(); key = top.current.key; @@ -365,7 +368,10 @@ public final class InternalDateHistogram extends 
InternalMultiBucketAggregation< if (currentBuckets.isEmpty() == false) { final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceContext); if (reduced.getDocCount() >= minDocCount || reduceContext.isFinalReduce() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); reducedBuckets.add(reduced); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(reduced)); } } } @@ -388,6 +394,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< long key = bounds.getMin() + offset; long max = bounds.getMax() + offset; while (key <= max) { + reduceContext.consumeBucketsAndMaybeBreak(1); iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); key = nextKey(key).longValue(); } @@ -397,6 +404,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< long key = bounds.getMin() + offset; if (key < firstBucket.key) { while (key < firstBucket.key) { + reduceContext.consumeBucketsAndMaybeBreak(1); iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); key = nextKey(key).longValue(); } @@ -412,6 +420,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< if (lastBucket != null) { long key = nextKey(lastBucket.key).longValue(); while (key < nextBucket.key) { + reduceContext.consumeBucketsAndMaybeBreak(1); iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); key = nextKey(key).longValue(); } @@ -425,6 +434,7 @@ public final class InternalDateHistogram extends InternalMultiBucketAggregation< long key = nextKey(lastBucket.key).longValue(); long max = bounds.getMax() + offset; while (key <= max) { + reduceContext.consumeBucketsAndMaybeBreak(1); iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); key = nextKey(key).longValue(); } diff --git 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java index aa94bb76259..b3516b04dfc 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalHistogram.java @@ -326,7 +326,10 @@ public final class InternalHistogram extends InternalMultiBucketAggregation= minDocCount || reduceContext.isFinalReduce() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); reducedBuckets.add(reduced); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(reduced)); } currentBuckets.clear(); key = top.current.key; @@ -347,7 +350,10 @@ public final class InternalHistogram extends InternalMultiBucketAggregation= minDocCount || reduceContext.isFinalReduce() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); reducedBuckets.add(reduced); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(reduced)); } } } @@ -374,6 +380,7 @@ public final class InternalHistogram extends InternalMultiBucketAggregation buckets = new ArrayList<>(ranges.length); for (int i = 0; i < ranges.length; ++i) { long bucketOrd = bucket * ranges.length + i; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java index 3336da08597..e7a3c35231c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalBinaryRange.java @@ -241,6 +241,7 @@ public final class InternalBinaryRange @Override public InternalAggregation doReduce(List aggregations, ReduceContext reduceContext) { + 
reduceContext.consumeBucketsAndMaybeBreak(buckets.size()); long[] docCounts = new long[buckets.size()]; InternalAggregations[][] aggs = new InternalAggregations[buckets.size()][]; for (int i = 0; i < aggs.length; ++i) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java index f5bb0e25c66..9485d534ab9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/InternalRange.java @@ -302,6 +302,7 @@ public class InternalRange aggregations, ReduceContext reduceContext) { + reduceContext.consumeBucketsAndMaybeBreak(ranges.size()); List[] rangeList = new List[ranges.size()]; for (int i = 0; i < rangeList.length; ++i) { rangeList[i] = new ArrayList<>(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index e502de6210b..2416bf99a11 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -323,6 +323,7 @@ public class RangeAggregator extends BucketsAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { + consumeBucketsAndMaybeBreak(ranges.length); List buckets = new ArrayList<>(ranges.length); for (int i = 0; i < ranges.length; i++) { Range range = ranges[i]; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java index db620775b61..66b8f8d5b15 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java @@ -131,6 +131,9 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri // global stats spare.updateScore(significanceHeuristic); spare = ordered.insertWithOverflow(spare); + if (spare == null) { + consumeBucketsAndMaybeBreak(1); + } } final SignificantStringTerms.Bucket[] list = new SignificantStringTerms.Bucket[ordered.size()]; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java index fe072eb009a..42a3c6a849b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/InternalSignificantTerms.java @@ -241,7 +241,14 @@ public abstract class InternalSignificantTerms 0) && (b.subsetDf >= minDocCount)) || reduceContext.isFinalReduce() == false) { - ordered.insertWithOverflow(b); + B removed = ordered.insertWithOverflow(b); + if (removed == null) { + reduceContext.consumeBucketsAndMaybeBreak(1); + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(removed)); + } + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); } } B[] list = createBucketsArray(ordered.size()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsAggregator.java index 93396af9961..235b3f41c08 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantLongTermsAggregator.java @@ -101,6 +101,9 @@ public class SignificantLongTermsAggregator extends LongTermsAggregator { spare.bucketOrd = i; spare = ordered.insertWithOverflow(spare); + if (spare == null) { + consumeBucketsAndMaybeBreak(1); + } } final SignificantLongTerms.Bucket[] list = new SignificantLongTerms.Bucket[ordered.size()]; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsAggregator.java index c090d80b039..56258758907 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantStringTermsAggregator.java @@ -107,6 +107,9 @@ public class SignificantStringTermsAggregator extends StringTermsAggregator { spare.bucketOrd = i; spare = ordered.insertWithOverflow(spare); + if (spare == null) { + consumeBucketsAndMaybeBreak(1); + } } final SignificantStringTerms.Bucket[] list = new SignificantStringTerms.Bucket[ordered.size()]; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java index 1060bf41488..4dae78aa140 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/SignificantTextAggregator.java @@ -59,7 +59,7 @@ import java.util.stream.Collectors; import static java.util.Collections.emptyList; public class 
SignificantTextAggregator extends BucketsAggregator { - + private final StringFilter includeExclude; protected final BucketCountThresholds bucketCountThresholds; protected long numCollectedDocs; @@ -90,20 +90,20 @@ public class SignificantTextAggregator extends BucketsAggregator { this.sourceFieldNames = sourceFieldNames; bucketOrds = new BytesRefHash(1, context.bigArrays()); if(filterDuplicateText){ - dupSequenceSpotter = new DuplicateByteSequenceSpotter(); + dupSequenceSpotter = new DuplicateByteSequenceSpotter(); lastTrieSize = dupSequenceSpotter.getEstimatedSizeInBytes(); } } - - + + @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { final BytesRefBuilder previous = new BytesRefBuilder(); return new LeafBucketCollectorBase(sub, null) { - + @Override public void collect(int doc, long bucket) throws IOException { collectFromSource(doc, bucket, fieldName, sourceFieldNames); @@ -112,8 +112,8 @@ public class SignificantTextAggregator extends BucketsAggregator { dupSequenceSpotter.startNewSequence(); } } - - private void processTokenStream(int doc, long bucket, TokenStream ts, BytesRefHash inDocTerms, String fieldText) + + private void processTokenStream(int doc, long bucket, TokenStream ts, BytesRefHash inDocTerms, String fieldText) throws IOException{ if (dupSequenceSpotter != null) { ts = new DeDuplicatingTokenFilter(ts, dupSequenceSpotter); @@ -151,35 +151,35 @@ public class SignificantTextAggregator extends BucketsAggregator { ts.close(); } } - + private void collectFromSource(int doc, long bucket, String indexedFieldName, String[] sourceFieldNames) throws IOException { MappedFieldType fieldType = context.getQueryShardContext().fieldMapper(indexedFieldName); if(fieldType == null){ throw new IllegalArgumentException("Aggregation [" + name + "] cannot process field ["+indexedFieldName - +"] since it is not present"); + +"] since it is not present"); } SourceLookup sourceLookup = 
context.lookup().source(); sourceLookup.setSegmentAndDocument(ctx, doc); BytesRefHash inDocTerms = new BytesRefHash(256, context.bigArrays()); - - try { + + try { for (String sourceField : sourceFieldNames) { - List textsToHighlight = sourceLookup.extractRawValues(sourceField); + List textsToHighlight = sourceLookup.extractRawValues(sourceField); textsToHighlight = textsToHighlight.stream().map(obj -> { if (obj instanceof BytesRef) { return fieldType.valueForDisplay(obj).toString(); } else { return obj; } - }).collect(Collectors.toList()); - - Analyzer analyzer = fieldType.indexAnalyzer(); + }).collect(Collectors.toList()); + + Analyzer analyzer = fieldType.indexAnalyzer(); for (Object fieldValue : textsToHighlight) { String fieldText = fieldValue.toString(); TokenStream ts = analyzer.tokenStream(indexedFieldName, fieldText); - processTokenStream(doc, bucket, ts, inDocTerms, fieldText); - } + processTokenStream(doc, bucket, ts, inDocTerms, fieldText); + } } } finally{ Releasables.close(inDocTerms); @@ -220,7 +220,10 @@ public class SignificantTextAggregator extends BucketsAggregator { spare.updateScore(significanceHeuristic); spare.bucketOrd = i; - spare = ordered.insertWithOverflow(spare); + spare = ordered.insertWithOverflow(spare); + if (spare == null) { + consumeBucketsAndMaybeBreak(1); + } } final SignificantStringTerms.Bucket[] list = new SignificantStringTerms.Bucket[ordered.size()]; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 55023eb263f..6ad14b8d0f9 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -204,6 +204,7 @@ public class GlobalOrdinalsStringTermsAggregator extends 
AbstractStringTermsAggr if (bucketCountThresholds.getShardMinDocCount() <= spare.docCount) { spare = ordered.insertWithOverflow(spare); if (spare == null) { + consumeBucketsAndMaybeBreak(1); spare = new OrdBucket(-1, 0, null, showTermDocCountError, 0); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java index 529191ac311..72a641ea5bb 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/InternalTerms.java @@ -293,7 +293,12 @@ public abstract class InternalTerms, B extends Int B removed = ordered.insertWithOverflow(b); if (removed != null) { otherDocCount += removed.getDocCount(); + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(removed)); + } else { + reduceContext.consumeBucketsAndMaybeBreak(1); } + } else { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(b)); } } B[] list = createBucketsArray(ordered.size()); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java index 4a3190571d7..7cd2c4e9b3a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java @@ -125,7 +125,6 @@ public class LongTermsAggregator extends TermsAggregator { } final int size = (int) Math.min(bucketOrds.size(), bucketCountThresholds.getShardSize()); - long otherDocCount = 0; BucketPriorityQueue ordered = new BucketPriorityQueue<>(size, order.comparator(this)); LongTerms.Bucket spare = null; @@ -138,7 +137,10 @@ public class LongTermsAggregator extends TermsAggregator { otherDocCount += spare.docCount; 
spare.bucketOrd = i; if (bucketCountThresholds.getShardMinDocCount() <= spare.docCount) { - spare = (LongTerms.Bucket) ordered.insertWithOverflow(spare); + spare = ordered.insertWithOverflow(spare); + if (spare == null) { + consumeBucketsAndMaybeBreak(1); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java index 9ac2d4aaccf..95bc83ad88f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java @@ -144,6 +144,9 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { spare.bucketOrd = i; if (bucketCountThresholds.getShardMinDocCount() <= spare.docCount) { spare = ordered.insertWithOverflow(spare); + if (spare == null) { + consumeBucketsAndMaybeBreak(1); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java index c7943367d31..23ae1e9154c 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighter.java @@ -31,7 +31,6 @@ import org.apache.lucene.search.highlight.SimpleFragmenter; import org.apache.lucene.search.highlight.SimpleHTMLFormatter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter; import org.apache.lucene.search.highlight.TextFragment; -import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefHash; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ExceptionsHelper; @@ -48,6 +47,9 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import static 
org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter.convertFieldValue; +import static org.elasticsearch.search.fetch.subphase.highlight.UnifiedHighlighter.getAnalyzer; + public class PlainHighlighter implements Highlighter { private static final String CACHE_KEY = "highlight-plain"; @@ -100,18 +102,12 @@ public class PlainHighlighter implements Highlighter { int numberOfFragments = field.fieldOptions().numberOfFragments() == 0 ? 1 : field.fieldOptions().numberOfFragments(); ArrayList fragsList = new ArrayList<>(); List textsToHighlight; - Analyzer analyzer = context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); - + Analyzer analyzer = getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), mapper.fieldType()); try { textsToHighlight = HighlightUtils.loadFieldValues(field, mapper, context, hitContext); for (Object textToHighlight : textsToHighlight) { - String text; - if (textToHighlight instanceof BytesRef) { - text = mapper.fieldType().valueForDisplay(textToHighlight).toString(); - } else { - text = textToHighlight.toString(); - } + String text = convertFieldValue(mapper.fieldType(), textToHighlight); try (TokenStream tokenStream = analyzer.tokenStream(mapper.fieldType().name(), text)) { if (!tokenStream.hasAttribute(CharTermAttribute.class) || !tokenStream.hasAttribute(OffsetAttribute.class)) { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java index 034cac9e5f9..06dd9232a74 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java @@ -32,8 +32,11 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.text.Text; +import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.FieldMapper; +import org.elasticsearch.index.mapper.KeywordFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.search.fetch.FetchPhaseExecutionException; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; @@ -50,8 +53,6 @@ import java.util.stream.Collectors; import static org.apache.lucene.search.uhighlight.CustomUnifiedHighlighter.MULTIVAL_SEP_CHAR; public class UnifiedHighlighter implements Highlighter { - private static final String CACHE_KEY = "highlight-unified"; - @Override public boolean canHighlight(FieldMapper fieldMapper) { return true; @@ -63,36 +64,20 @@ public class UnifiedHighlighter implements Highlighter { SearchContextHighlight.Field field = highlighterContext.field; SearchContext context = highlighterContext.context; FetchSubPhase.HitContext hitContext = highlighterContext.hitContext; - - if (!hitContext.cache().containsKey(CACHE_KEY)) { - hitContext.cache().put(CACHE_KEY, new HighlighterEntry()); - } - - HighlighterEntry highlighterEntry = (HighlighterEntry) hitContext.cache().get(CACHE_KEY); - MapperHighlighterEntry mapperHighlighterEntry = highlighterEntry.mappers.get(fieldMapper); - - if (mapperHighlighterEntry == null) { - Encoder encoder = field.fieldOptions().encoder().equals("html") ? - HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; - CustomPassageFormatter passageFormatter = - new CustomPassageFormatter(field.fieldOptions().preTags()[0], - field.fieldOptions().postTags()[0], encoder); - mapperHighlighterEntry = new MapperHighlighterEntry(passageFormatter); - } + Encoder encoder = field.fieldOptions().encoder().equals("html") ? 
HighlightUtils.Encoders.HTML : HighlightUtils.Encoders.DEFAULT; + CustomPassageFormatter passageFormatter = new CustomPassageFormatter(field.fieldOptions().preTags()[0], + field.fieldOptions().postTags()[0], encoder); List snippets = new ArrayList<>(); int numberOfFragments; try { - Analyzer analyzer = - context.mapperService().documentMapper(hitContext.hit().getType()).mappers().indexAnalyzer(); + + final Analyzer analyzer = + getAnalyzer(context.mapperService().documentMapper(hitContext.hit().getType()), fieldMapper.fieldType()); List fieldValues = HighlightUtils.loadFieldValues(field, fieldMapper, context, hitContext); - fieldValues = fieldValues.stream().map(obj -> { - if (obj instanceof BytesRef) { - return fieldMapper.fieldType().valueForDisplay(obj).toString(); - } else { - return obj; - } - }).collect(Collectors.toList()); + fieldValues = fieldValues.stream() + .map((s) -> convertFieldValue(fieldMapper.fieldType(), s)) + .collect(Collectors.toList()); final IndexSearcher searcher = new IndexSearcher(hitContext.reader()); final CustomUnifiedHighlighter highlighter; final String fieldValue = mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR); @@ -102,15 +87,14 @@ public class UnifiedHighlighter implements Highlighter { // breaks the text on, so we don't lose the distinction between the different values of a field and we // get back a snippet per value CustomSeparatorBreakIterator breakIterator = new CustomSeparatorBreakIterator(MULTIVAL_SEP_CHAR); - highlighter = new CustomUnifiedHighlighter(searcher, analyzer, offsetSource, - mapperHighlighterEntry.passageFormatter, field.fieldOptions().boundaryScannerLocale(), - breakIterator, fieldValue, field.fieldOptions().noMatchSize()); + highlighter = new CustomUnifiedHighlighter(searcher, analyzer, offsetSource, passageFormatter, + field.fieldOptions().boundaryScannerLocale(), breakIterator, fieldValue, field.fieldOptions().noMatchSize()); numberOfFragments = fieldValues.size(); // we are highlighting the whole 
content, one snippet per value } else { //using paragraph separator we make sure that each field value holds a discrete passage for highlighting BreakIterator bi = getBreakIterator(field); - highlighter = new CustomUnifiedHighlighter(searcher, analyzer, offsetSource, - mapperHighlighterEntry.passageFormatter, field.fieldOptions().boundaryScannerLocale(), bi, + highlighter = new CustomUnifiedHighlighter(searcher, analyzer, offsetSource, passageFormatter, + field.fieldOptions().boundaryScannerLocale(), bi, fieldValue, field.fieldOptions().noMatchSize()); numberOfFragments = field.fieldOptions().numberOfFragments(); } @@ -210,6 +194,24 @@ public class UnifiedHighlighter implements Highlighter { return filteredSnippets; } + static Analyzer getAnalyzer(DocumentMapper docMapper, MappedFieldType type) { + if (type instanceof KeywordFieldMapper.KeywordFieldType) { + KeywordFieldMapper.KeywordFieldType keywordFieldType = (KeywordFieldMapper.KeywordFieldType) type; + if (keywordFieldType.normalizer() != null) { + return keywordFieldType.normalizer(); + } + } + return docMapper.mappers().indexAnalyzer(); + } + + static String convertFieldValue(MappedFieldType type, Object value) { + if (value instanceof BytesRef) { + return type.valueForDisplay(value).toString(); + } else { + return value.toString(); + } + } + private static String mergeFieldValues(List fieldValues, char valuesSeparator) { //postings highlighter accepts all values in a single string, as offsets etc. 
need to match with content //loaded from stored fields, we merge all values using a proper separator @@ -226,17 +228,4 @@ public class UnifiedHighlighter implements Highlighter { } return OffsetSource.ANALYSIS; } - - - private static class HighlighterEntry { - Map mappers = new HashMap<>(); - } - - private static class MapperHighlighterEntry { - final CustomPassageFormatter passageFormatter; - - private MapperHighlighterEntry(CustomPassageFormatter passageFormatter) { - this.passageFormatter = passageFormatter; - } - } } diff --git a/core/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java b/core/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java index 265e544d281..ec9d95c722d 100644 --- a/core/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java +++ b/core/src/main/java/org/elasticsearch/threadpool/AutoQueueAdjustingExecutorBuilder.java @@ -19,23 +19,14 @@ package org.elasticsearch.threadpool; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.SizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.node.Node; -import org.elasticsearch.threadpool.ExecutorBuilder; -import org.elasticsearch.common.util.concurrent.QueueResizingEsThreadPoolExecutor; -import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Locale; @@ -121,8 +112,16 @@ public final class AutoQueueAdjustingExecutorBuilder extends 
ExecutorBuilder, Integer> reverse = new HashMap<>(); for (Map.Entry> entry : ids.entrySet()) { diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index 4acdfd636bf..41691f70c06 100644 --- a/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -20,13 +20,26 @@ package org.elasticsearch.action.admin.indices.create; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_REPLICAS; +import static org.elasticsearch.cluster.metadata.IndexMetaData.SETTING_NUMBER_OF_SHARDS; +import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; public class CreateIndexRequestTests extends ESTestCase { @@ -46,7 +59,7 @@ public class CreateIndexRequestTests extends ESTestCase { } } } - + public void testTopLevelKeys() throws IOException { String createIndex = "{\n" @@ -65,8 +78,168 @@ public class CreateIndexRequestTests extends ESTestCase { + "}"; CreateIndexRequest request = new 
CreateIndexRequest(); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> {request.source(createIndex, XContentType.JSON);}); assertEquals("unknown key [FOO_SHOULD_BE_ILLEGAL_HERE] for create index", e.getMessage()); } + + public void testToXContent() throws IOException { + CreateIndexRequest request = new CreateIndexRequest("foo"); + + String mapping = JsonXContent.contentBuilder().startObject().startObject("type").endObject().endObject().string(); + request.mapping("my_type", mapping, XContentType.JSON); + + Alias alias = new Alias("test_alias"); + alias.routing("1"); + alias.filter("{\"term\":{\"year\":2016}}"); + request.alias(alias); + + Settings.Builder settings = Settings.builder(); + settings.put(SETTING_NUMBER_OF_SHARDS, 10); + request.settings(settings); + + String actualRequestBody = Strings.toString(request); + + String expectedRequestBody = "{\"settings\":{\"index\":{\"number_of_shards\":\"10\"}}," + + "\"mappings\":{\"my_type\":{\"type\":{}}}," + + "\"aliases\":{\"test_alias\":{\"filter\":{\"term\":{\"year\":2016}},\"routing\":\"1\"}}}"; + + assertEquals(expectedRequestBody, actualRequestBody); + } + + public void testToAndFromXContent() throws IOException { + + final CreateIndexRequest createIndexRequest = createTestItem(); + + boolean humanReadable = randomBoolean(); + final XContentType xContentType = randomFrom(XContentType.values()); + BytesReference originalBytes = toShuffledXContent(createIndexRequest, xContentType, EMPTY_PARAMS, humanReadable); + + CreateIndexRequest parsedCreateIndexRequest = new CreateIndexRequest(createIndexRequest.index()); + parsedCreateIndexRequest.source(originalBytes, xContentType); + + assertMappingsEqual(createIndexRequest.mappings(), parsedCreateIndexRequest.mappings()); + assertAliasesEqual(createIndexRequest.aliases(), parsedCreateIndexRequest.aliases()); + 
assertEquals(createIndexRequest.settings(), parsedCreateIndexRequest.settings()); + } + + private void assertMappingsEqual(Map expected, Map actual) throws IOException { + assertEquals(expected.keySet(), actual.keySet()); + + for (Map.Entry expectedEntry : expected.entrySet()) { + String expectedValue = expectedEntry.getValue(); + String actualValue = actual.get(expectedEntry.getKey()); + XContentParser expectedJson = createParser(XContentType.JSON.xContent(), expectedValue); + XContentParser actualJson = createParser(XContentType.JSON.xContent(), actualValue); + assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + } + } + + private static void assertAliasesEqual(Set expected, Set actual) throws IOException { + assertEquals(expected, actual); + + for (Alias expectedAlias : expected) { + for (Alias actualAlias : actual) { + if (expectedAlias.equals(actualAlias)) { + // As Alias#equals only looks at name, we check the equality of the other Alias parameters here. + assertEquals(expectedAlias.filter(), actualAlias.filter()); + assertEquals(expectedAlias.indexRouting(), actualAlias.indexRouting()); + assertEquals(expectedAlias.searchRouting(), actualAlias.searchRouting()); + } + } + } + } + + /** + * Returns a random {@link CreateIndexRequest}. 
+ */ + private static CreateIndexRequest createTestItem() throws IOException { + String index = randomAlphaOfLength(5); + + CreateIndexRequest request = new CreateIndexRequest(index); + + int aliasesNo = randomIntBetween(0, 2); + for (int i = 0; i < aliasesNo; i++) { + request.alias(randomAlias()); + } + + if (randomBoolean()) { + String type = randomAlphaOfLength(5); + request.mapping(type, randomMapping(type)); + } + + if (randomBoolean()) { + request.settings(randomIndexSettings()); + } + + return request; + } + + private static Settings randomIndexSettings() { + Settings.Builder builder = Settings.builder(); + + if (randomBoolean()) { + int numberOfShards = randomIntBetween(1, 10); + builder.put(SETTING_NUMBER_OF_SHARDS, numberOfShards); + } + + if (randomBoolean()) { + int numberOfReplicas = randomIntBetween(1, 10); + builder.put(SETTING_NUMBER_OF_REPLICAS, numberOfReplicas); + } + + return builder.build(); + } + + private static XContentBuilder randomMapping(String type) throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder(); + builder.startObject().startObject(type); + + randomMappingFields(builder, true); + + builder.endObject().endObject(); + return builder; + } + + private static void randomMappingFields(XContentBuilder builder, boolean allowObjectField) throws IOException { + builder.startObject("properties"); + + int fieldsNo = randomIntBetween(0, 5); + for (int i = 0; i < fieldsNo; i++) { + builder.startObject(randomAlphaOfLength(5)); + + if (allowObjectField && randomBoolean()) { + randomMappingFields(builder, false); + } else { + builder.field("type", "text"); + } + + builder.endObject(); + } + + builder.endObject(); + } + + private static Alias randomAlias() { + Alias alias = new Alias(randomAlphaOfLength(5)); + + if (randomBoolean()) { + if (randomBoolean()) { + alias.routing(randomAlphaOfLength(5)); + } else { + if (randomBoolean()) { + alias.indexRouting(randomAlphaOfLength(5)); + } + if (randomBoolean()) { + 
alias.searchRouting(randomAlphaOfLength(5)); + } + } + } + + if (randomBoolean()) { + alias.filter("{\"term\":{\"year\":2016}}"); + } + + return alias; + } } diff --git a/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index 15d24b85b49..c1f729a12ca 100644 --- a/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.Index; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; @@ -56,7 +57,8 @@ public class DfsQueryPhaseTests extends ESTestCase { results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { @@ -113,7 +115,8 @@ public class DfsQueryPhaseTests extends ESTestCase { results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new 
SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { @@ -169,7 +172,8 @@ public class DfsQueryPhaseTests extends ESTestCase { results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); SearchTransportService searchTransportService = new SearchTransportService( Settings.builder().put("search.remote.connect", false).build(), null, null) { diff --git a/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index bd38a420f07..7f4fbc91157 100644 --- a/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -29,6 +29,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; @@ -44,7 +45,8 @@ import java.util.concurrent.atomic.AtomicReference; public class FetchSearchPhaseTests extends ESTestCase { public void testShortcutQueryAndFetchOptimization() throws IOException { - SearchPhaseController 
controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 1); @@ -85,7 +87,8 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testFetchTwoDocument() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference responseRef = new AtomicReference<>(); @@ -139,7 +142,8 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testFailFetchOneDoc() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference responseRef = new AtomicReference<>(); @@ -197,7 +201,8 @@ public class FetchSearchPhaseTests extends ESTestCase { int resultSetSize = randomIntBetween(0, 100); // we use at least 2 hits otherwise 
this is subject to single shard optimization and we trip an assert... int numHits = randomIntBetween(2, 100); // also numshards --> 1 hit per shard - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(numHits); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), numHits); @@ -253,7 +258,8 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testExceptionFailsPhase() throws IOException { MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference responseRef = new AtomicReference<>(); @@ -306,7 +312,8 @@ public class FetchSearchPhaseTests extends ESTestCase { public void testCleanupIrrelevantContexts() throws IOException { // contexts that are not fetched should be cleaned up MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); - SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + SearchPhaseController controller = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); InitialSearchPhase.ArraySearchPhaseResults results = 
controller.newSearchPhaseResults(mockSearchPhaseContext.getRequest(), 2); AtomicReference responseRef = new AtomicReference<>(); diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index e6de1d859d8..faec42b2587 100644 --- a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -19,24 +19,36 @@ package org.elasticsearch.action.search; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.CheckedBiConsumer; +import org.elasticsearch.common.CheckedRunnable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestMultiSearchAction; +import org.elasticsearch.search.Scroll; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.FakeRestRequest; import java.io.IOException; +import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiConsumer; import static java.util.Collections.singletonList; +import static org.elasticsearch.search.RandomSearchRequestGenerator.randomSearchRequest; +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static 
org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -202,4 +214,87 @@ public class MultiSearchRequestTests extends ESTestCase { return new NamedXContentRegistry(singletonList(new NamedXContentRegistry.Entry(QueryBuilder.class, new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent(p)))); } + + public void testMultiLineSerialization() throws IOException { + int iters = 16; + for (int i = 0; i < iters; i++) { + // The only formats that support stream separator + XContentType xContentType = randomFrom(XContentType.JSON, XContentType.SMILE); + MultiSearchRequest originalRequest = createMultiSearchRequest(); + + byte[] originalBytes = MultiSearchRequest.writeMultiLineFormat(originalRequest, xContentType.xContent()); + MultiSearchRequest parsedRequest = new MultiSearchRequest(); + CheckedBiConsumer consumer = (r, p) -> { + SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(p); + if (searchSourceBuilder.equals(new SearchSourceBuilder()) == false) { + r.source(searchSourceBuilder); + } + parsedRequest.add(r); + }; + MultiSearchRequest.readMultiLineFormat(new BytesArray(originalBytes), xContentType.xContent(), + consumer, null, null, null, null, null, xContentRegistry(), true); + assertEquals(originalRequest, parsedRequest); + } + } + + public void testEqualsAndHashcode() throws IOException { + checkEqualsAndHashCode(createMultiSearchRequest(), MultiSearchRequestTests::copyRequest, MultiSearchRequestTests::mutate); + } + + private static MultiSearchRequest mutate(MultiSearchRequest searchRequest) throws IOException { + MultiSearchRequest mutation = copyRequest(searchRequest); + List> mutators = new ArrayList<>(); + mutators.add(() -> mutation.indicesOptions(randomValueOtherThan(searchRequest.indicesOptions(), + () -> IndicesOptions.fromOptions(randomBoolean(), randomBoolean(), randomBoolean(), randomBoolean())))); + mutators.add(() -> 
mutation.maxConcurrentSearchRequests(randomIntBetween(1, 32))); + mutators.add(() -> mutation.add(createSimpleSearchRequest())); + randomFrom(mutators).run(); + return mutation; + } + + private static MultiSearchRequest copyRequest(MultiSearchRequest request) throws IOException { + MultiSearchRequest copy = new MultiSearchRequest(); + if (request.maxConcurrentSearchRequests() > 0) { + copy.maxConcurrentSearchRequests(request.maxConcurrentSearchRequests()); + } + copy.indicesOptions(request.indicesOptions()); + for (SearchRequest searchRequest : request.requests()) { + copy.add(searchRequest); + } + return copy; + } + + private static MultiSearchRequest createMultiSearchRequest() throws IOException { + int numSearchRequest = randomIntBetween(1, 128); + MultiSearchRequest request = new MultiSearchRequest(); + for (int j = 0; j < numSearchRequest; j++) { + SearchRequest searchRequest = createSimpleSearchRequest(); + + // scroll is not supported in the current msearch api, so unset it: + searchRequest.scroll((Scroll) null); + + // only expand_wildcards, ignore_unavailable and allow_no_indices can be specified from msearch api, so unset other options: + IndicesOptions randomlyGenerated = searchRequest.indicesOptions(); + IndicesOptions msearchDefault = IndicesOptions.strictExpandOpenAndForbidClosed(); + searchRequest.indicesOptions(IndicesOptions.fromOptions( + randomlyGenerated.ignoreUnavailable(), randomlyGenerated.allowNoIndices(), randomlyGenerated.expandWildcardsOpen(), + randomlyGenerated.expandWildcardsClosed(), msearchDefault.allowAliasesToMultipleIndices(), + msearchDefault.forbidClosedIndices(), msearchDefault.ignoreAliases() + )); + + request.add(searchRequest); + } + return request; + } + + private static SearchRequest createSimpleSearchRequest() throws IOException { + return randomSearchRequest(() -> { + // No need to return a very complex SearchSourceBuilder here, that is tested elsewhere + SearchSourceBuilder searchSourceBuilder = new 
SearchSourceBuilder(); + searchSourceBuilder.from(randomInt(10)); + searchSourceBuilder.size(randomIntBetween(20, 100)); + return searchSourceBuilder; + }); + } + } diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java new file mode 100644 index 00000000000..874bea5ff65 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java @@ -0,0 +1,86 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.search; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.nullValue; + +public class MultiSearchResponseTests extends ESTestCase { + + public void testFromXContent() throws IOException { + for (int runs = 0; runs < 20; runs++) { + MultiSearchResponse expected = createTestInstance(); + XContentType xContentType = randomFrom(XContentType.values()); + BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); + XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); + MultiSearchResponse actual = MultiSearchResponse.fromXContext(parser); + assertThat(parser.nextToken(), nullValue()); + + assertThat(actual.getTook(), equalTo(expected.getTook())); + assertThat(actual.getResponses().length, equalTo(expected.getResponses().length)); + for (int i = 0; i < expected.getResponses().length; i++) { + MultiSearchResponse.Item expectedItem = expected.getResponses()[i]; + MultiSearchResponse.Item actualItem = actual.getResponses()[i]; + if (expectedItem.isFailure()) { + assertThat(actualItem.getResponse(), nullValue()); + assertThat(actualItem.getFailureMessage(), containsString(expectedItem.getFailureMessage())); + } else { + assertThat(actualItem.getResponse().toString(), equalTo(expectedItem.getResponse().toString())); + assertThat(actualItem.getFailure(), nullValue()); + } + } + } + } + + private static 
MultiSearchResponse createTestInstance() { + int numItems = randomIntBetween(0, 128); + MultiSearchResponse.Item[] items = new MultiSearchResponse.Item[numItems]; + for (int i = 0; i < numItems; i++) { + if (randomBoolean()) { + // Creating a minimal response is OK, because SearchResponse self + // is tested elsewhere. + long tookInMillis = randomNonNegativeLong(); + int totalShards = randomIntBetween(1, Integer.MAX_VALUE); + int successfulShards = randomIntBetween(0, totalShards); + int skippedShards = totalShards - successfulShards; + InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); + SearchResponse.Clusters clusters = new SearchResponse.Clusters(totalShards, successfulShards, skippedShards); + SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards, + successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters); + items[i] = new MultiSearchResponse.Item(searchResponse, null); + } else { + items[i] = new MultiSearchResponse.Item(null, new ElasticsearchException("an error")); + } + } + return new MultiSearchResponse(items, randomNonNegativeLong()); + } + +} diff --git a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 7501a7a90be..393c45fa572 100644 --- a/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; import 
org.elasticsearch.search.aggregations.metrics.max.InternalMax; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -66,7 +67,8 @@ public class SearchPhaseControllerTests extends ESTestCase { @Before public void setup() { - searchPhaseController = new SearchPhaseController(Settings.EMPTY, BigArrays.NON_RECYCLING_INSTANCE, null); + searchPhaseController = new SearchPhaseController(Settings.EMPTY, + (b) -> new InternalAggregation.ReduceContext(BigArrays.NON_RECYCLING_INSTANCE, null, b)); } public void testSort() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java index 520c881aa7e..79cc13594e9 100644 --- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java +++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java @@ -1025,6 +1025,53 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase { assertEquals("expected to find term statistics in exactly one shard!", 2, sumDocFreq); } + public void testWithKeywordAndNormalizer() throws IOException, ExecutionException, InterruptedException { + // setup indices + String[] indexNames = new String[] {"with_tv", "without_tv"}; + Settings.Builder builder = Settings.builder() + .put(indexSettings()) + .put("index.analysis.analyzer.my_analyzer.tokenizer", "keyword") + .putList("index.analysis.analyzer.my_analyzer.filter", "lowercase") + .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase"); + assertAcked(prepareCreate(indexNames[0]).setSettings(builder.build()) + .addMapping("type1", "field1", "type=text,term_vector=with_positions_offsets,analyzer=my_analyzer", + "field2", "type=text,term_vector=with_positions_offsets,analyzer=keyword")); + assertAcked(prepareCreate(indexNames[1]).setSettings(builder.build()) + .addMapping("type1", "field1", "type=keyword,normalizer=my_normalizer", "field2", 
"type=keyword")); + ensureGreen(); + + // index documents with and without term vectors + String[] content = new String[] { "Hello World", "hello world", "HELLO WORLD" }; + + List indexBuilders = new ArrayList<>(); + for (String indexName : indexNames) { + for (int id = 0; id < content.length; id++) { + indexBuilders.add(client().prepareIndex() + .setIndex(indexName) + .setType("type1") + .setId(String.valueOf(id)) + .setSource("field1", content[id], "field2", content[id])); + } + } + indexRandom(true, indexBuilders); + + // request tvs and compare from each index + for (int id = 0; id < content.length; id++) { + Fields[] fields = new Fields[2]; + for (int j = 0; j < indexNames.length; j++) { + TermVectorsResponse resp = client().prepareTermVector(indexNames[j], "type1", String.valueOf(id)) + .setOffsets(true) + .setPositions(true) + .setSelectedFields("field1", "field2") + .get(); + assertThat("doc with index: " + indexNames[j] + ", type1 and id: " + id, resp.isExists(), equalTo(true)); + fields[j] = resp.getFields(); + } + compareTermVectors("field1", fields[0], fields[1]); + compareTermVectors("field2", fields[0], fields[1]); + } + } + private void checkBestTerms(Terms terms, List expectedTerms) throws IOException { final TermsEnum termsEnum = terms.iterator(); List bestTerms = new ArrayList<>(); diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java index 8f247abcf33..d5f441436e7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java @@ -21,6 +21,7 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.ImmutableOpenMap; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; @@ -120,4 +121,45 @@ public class IndexTemplateMetaDataTests extends ESTestCase { assertThat(indexTemplateMetaData, equalTo(indexTemplateMetaDataRoundTrip)); } + public void testValidateInvalidIndexPatterns() throws Exception { + final IllegalArgumentException emptyPatternError = expectThrows(IllegalArgumentException.class, () -> { + new IndexTemplateMetaData(randomRealisticUnicodeOfLengthBetween(5, 10), randomInt(), randomInt(), + Collections.emptyList(), Settings.EMPTY, ImmutableOpenMap.of(), ImmutableOpenMap.of(), ImmutableOpenMap.of()); + }); + assertThat(emptyPatternError.getMessage(), equalTo("Index patterns must not be null or empty; got []")); + + final IllegalArgumentException nullPatternError = expectThrows(IllegalArgumentException.class, () -> { + new IndexTemplateMetaData(randomRealisticUnicodeOfLengthBetween(5, 10), randomInt(), randomInt(), + null, Settings.EMPTY, ImmutableOpenMap.of(), ImmutableOpenMap.of(), ImmutableOpenMap.of()); + }); + assertThat(nullPatternError.getMessage(), equalTo("Index patterns must not be null or empty; got null")); + + final String templateWithEmptyPattern = "{\"index_patterns\" : [],\"order\" : 1000," + + "\"settings\" : {\"number_of_shards\" : 10,\"number_of_replicas\" : 1}," + + "\"mappings\" : {\"doc\" :" + + "{\"properties\":{\"" + + randomAlphaOfLength(10) + "\":{\"type\":\"text\"},\"" + + randomAlphaOfLength(10) + "\":{\"type\":\"keyword\"}}" + + "}}}"; + try (XContentParser parser = + XContentHelper.createParser(NamedXContentRegistry.EMPTY, new BytesArray(templateWithEmptyPattern), XContentType.JSON)) { + final IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> IndexTemplateMetaData.Builder.fromXContent(parser, randomAlphaOfLengthBetween(1, 100))); + assertThat(ex.getMessage(), equalTo("Index patterns must not be null or 
empty; got []")); + } + + final String templateWithoutPattern = "{\"order\" : 1000," + + "\"settings\" : {\"number_of_shards\" : 10,\"number_of_replicas\" : 1}," + + "\"mappings\" : {\"doc\" :" + + "{\"properties\":{\"" + + randomAlphaOfLength(10) + "\":{\"type\":\"text\"},\"" + + randomAlphaOfLength(10) + "\":{\"type\":\"keyword\"}}" + + "}}}"; + try (XContentParser parser = + XContentHelper.createParser(NamedXContentRegistry.EMPTY, new BytesArray(templateWithoutPattern), XContentType.JSON)) { + final IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> IndexTemplateMetaData.Builder.fromXContent(parser, randomAlphaOfLengthBetween(1, 100))); + assertThat(ex.getMessage(), equalTo("Index patterns must not be null or empty; got null")); + } + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index 0fa6831fb06..e329e70134c 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.Version; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -31,8 +32,8 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { public void testArchiveBrokenIndexSettings() { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), - new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - Collections.emptyList()); + new MapperRegistry(Collections.emptyMap(), 
Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, Collections.emptyList()); IndexMetaData src = newIndexMeta("foo", Settings.EMPTY); IndexMetaData indexMetaData = service.archiveBrokenIndexSettings(src); assertSame(indexMetaData, src); @@ -59,8 +60,8 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { public void testUpgrade() { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), - new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - Collections.emptyList()); + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, Collections.emptyList()); IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); assertFalse(service.isUpgraded(src)); src = service.upgradeIndexMetaData(src, Version.CURRENT.minimumIndexCompatibilityVersion()); @@ -72,8 +73,8 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { public void testIsUpgraded() { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), - new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - Collections.emptyList()); + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, Collections.emptyList()); IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "-200").build()); assertFalse(service.isUpgraded(src)); Version version = VersionUtils.randomVersionBetween(random(), VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion()); @@ -85,8 +86,8 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { public void testFailUpgrade() { 
MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), - new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - Collections.emptyList()); + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, Collections.emptyList()); Version minCompat = Version.CURRENT.minimumIndexCompatibilityVersion(); Version indexUpgraded = VersionUtils.randomVersionBetween(random(), minCompat, VersionUtils.getPreviousVersion(Version.CURRENT)); Version indexCreated = Version.fromString((minCompat.major - 1) + "." + randomInt(5) + "." + randomInt(5)); @@ -111,14 +112,13 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { public void testPluginUpgrade() { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), - new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - Collections.singletonList( - indexMetaData -> IndexMetaData.builder(indexMetaData) - .settings( - Settings.builder() - .put(indexMetaData.getSettings()) - .put("index.refresh_interval", "10s") - ).build())); + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, Collections.singletonList( + indexMetaData -> IndexMetaData.builder(indexMetaData).settings( + Settings.builder() + .put(indexMetaData.getSettings()) + .put("index.refresh_interval", "10s") + ).build())); IndexMetaData src = newIndexMeta("foo", Settings.builder().put("index.refresh_interval", "200s").build()); assertFalse(service.isUpgraded(src)); src = service.upgradeIndexMetaData(src, Version.CURRENT.minimumIndexCompatibilityVersion()); @@ -129,12 +129,12 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { public void 
testPluginUpgradeFailure() { MetaDataIndexUpgradeService service = new MetaDataIndexUpgradeService(Settings.EMPTY, xContentRegistry(), - new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()), IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, - Collections.singletonList( - indexMetaData -> { - throw new IllegalStateException("Cannot upgrade index " + indexMetaData.getIndex().getName()); - } - )); + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, Collections.singletonList( + indexMetaData -> { + throw new IllegalStateException("Cannot upgrade index " + indexMetaData.getIndex().getName()); + } + )); IndexMetaData src = newIndexMeta("foo", Settings.EMPTY); String message = expectThrows(IllegalStateException.class, () -> service.upgradeIndexMetaData(src, Version.CURRENT.minimumIndexCompatibilityVersion())).getMessage(); @@ -150,7 +150,6 @@ public class MetaDataIndexUpgradeServiceTests extends ESTestCase { .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) .put(indexSettings) .build(); - IndexMetaData metaData = IndexMetaData.builder(name).settings(build).build(); - return metaData; + return IndexMetaData.builder(name).settings(build).build(); } } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index dd7683c1de2..acf6525f99d 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -21,17 +21,21 @@ package org.elasticsearch.cluster.metadata; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterModule; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.ImmutableOpenMap; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.Index; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -231,4 +235,380 @@ public class MetaDataTests extends ESTestCase { ); assertThat(fromStreamMeta.indexGraveyard(), equalTo(fromStreamMeta.indexGraveyard())); } + + public void testFindMappings() throws IOException { + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("index1") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) + .putMapping("doc", FIND_MAPPINGS_TEST_ITEM)) + .put(IndexMetaData.builder("index2") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) + .putMapping("doc", FIND_MAPPINGS_TEST_ITEM)).build(); + + { + ImmutableOpenMap> mappings = metaData.findMappings(Strings.EMPTY_ARRAY, + Strings.EMPTY_ARRAY, MapperPlugin.NOOP_FIELD_FILTER); + assertEquals(0, mappings.size()); + } + { + ImmutableOpenMap> mappings = metaData.findMappings(new String[]{"index1"}, + new String[]{"notfound"}, MapperPlugin.NOOP_FIELD_FILTER); + assertEquals(0, mappings.size()); + } + { + ImmutableOpenMap> mappings = metaData.findMappings(new String[]{"index1"}, + Strings.EMPTY_ARRAY, 
MapperPlugin.NOOP_FIELD_FILTER); + assertEquals(1, mappings.size()); + assertIndexMappingsNotFiltered(mappings, "index1"); + } + { + ImmutableOpenMap> mappings = metaData.findMappings( + new String[]{"index1", "index2"}, + new String[]{randomBoolean() ? "doc" : "_all"}, MapperPlugin.NOOP_FIELD_FILTER); + assertEquals(2, mappings.size()); + assertIndexMappingsNotFiltered(mappings, "index1"); + assertIndexMappingsNotFiltered(mappings, "index2"); + } + } + + public void testFindMappingsNoOpFilters() throws IOException { + MappingMetaData originalMappingMetaData = new MappingMetaData("doc", + XContentHelper.convertToMap(JsonXContent.jsonXContent, FIND_MAPPINGS_TEST_ITEM, true)); + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("index1") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) + .putMapping(originalMappingMetaData)).build(); + + { + ImmutableOpenMap> mappings = metaData.findMappings(new String[]{"index1"}, + randomBoolean() ? Strings.EMPTY_ARRAY : new String[]{"_all"}, MapperPlugin.NOOP_FIELD_FILTER); + ImmutableOpenMap index1 = mappings.get("index1"); + MappingMetaData mappingMetaData = index1.get("doc"); + assertSame(originalMappingMetaData, mappingMetaData); + } + { + ImmutableOpenMap> mappings = metaData.findMappings(new String[]{"index1"}, + randomBoolean() ? 
Strings.EMPTY_ARRAY : new String[]{"_all"}, index -> field -> randomBoolean()); + ImmutableOpenMap index1 = mappings.get("index1"); + MappingMetaData mappingMetaData = index1.get("doc"); + assertNotSame(originalMappingMetaData, mappingMetaData); + } + { + ImmutableOpenMap> mappings = metaData.findMappings(new String[]{"index1"}, + new String[]{"doc"}, MapperPlugin.NOOP_FIELD_FILTER); + ImmutableOpenMap index1 = mappings.get("index1"); + MappingMetaData mappingMetaData = index1.get("doc"); + assertSame(originalMappingMetaData, mappingMetaData); + } + { + ImmutableOpenMap> mappings = metaData.findMappings(new String[]{"index1"}, + new String[]{"doc"}, index -> field -> randomBoolean()); + ImmutableOpenMap index1 = mappings.get("index1"); + MappingMetaData mappingMetaData = index1.get("doc"); + assertNotSame(originalMappingMetaData, mappingMetaData); + } + } + + @SuppressWarnings("unchecked") + public void testFindMappingsWithFilters() throws IOException { + String mapping = FIND_MAPPINGS_TEST_ITEM; + if (randomBoolean()) { + Map stringObjectMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, FIND_MAPPINGS_TEST_ITEM, false); + Map doc = (Map)stringObjectMap.get("doc"); + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.map(doc); + mapping = builder.string(); + } + } + + MetaData metaData = MetaData.builder() + .put(IndexMetaData.builder("index1") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) + .putMapping("doc", mapping)) + .put(IndexMetaData.builder("index2") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) + .putMapping("doc", mapping)) + .put(IndexMetaData.builder("index3") + .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, 
Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)) + .putMapping("doc", mapping)).build(); + + { + ImmutableOpenMap> mappings = metaData.findMappings( + new String[]{"index1", "index2", "index3"}, + new String[]{"doc"}, index -> { + if (index.equals("index1")) { + return field -> field.startsWith("name.") == false && field.startsWith("properties.key.") == false + && field.equals("age") == false && field.equals("address.location") == false; + } + if (index.equals("index2")) { + return field -> false; + } + return MapperPlugin.NOOP_FIELD_PREDICATE; + }); + + + + assertIndexMappingsNoFields(mappings, "index2"); + assertIndexMappingsNotFiltered(mappings, "index3"); + + ImmutableOpenMap index1Mappings = mappings.get("index1"); + assertNotNull(index1Mappings); + + assertEquals(1, index1Mappings.size()); + MappingMetaData docMapping = index1Mappings.get("doc"); + assertNotNull(docMapping); + + Map sourceAsMap = docMapping.getSourceAsMap(); + assertEquals(3, sourceAsMap.size()); + assertTrue(sourceAsMap.containsKey("_routing")); + assertTrue(sourceAsMap.containsKey("_source")); + + Map typeProperties = (Map) sourceAsMap.get("properties"); + assertEquals(6, typeProperties.size()); + assertTrue(typeProperties.containsKey("birth")); + assertTrue(typeProperties.containsKey("ip")); + assertTrue(typeProperties.containsKey("suggest")); + + Map name = (Map) typeProperties.get("name"); + assertNotNull(name); + assertEquals(1, name.size()); + Map nameProperties = (Map) name.get("properties"); + assertNotNull(nameProperties); + assertEquals(0, nameProperties.size()); + + Map address = (Map) typeProperties.get("address"); + assertNotNull(address); + assertEquals(2, address.size()); + assertTrue(address.containsKey("type")); + Map addressProperties = (Map) address.get("properties"); + assertNotNull(addressProperties); + assertEquals(2, addressProperties.size()); + assertLeafs(addressProperties, "street", "area"); + + Map 
properties = (Map) typeProperties.get("properties"); + assertNotNull(properties); + assertEquals(2, properties.size()); + assertTrue(properties.containsKey("type")); + Map propertiesProperties = (Map) properties.get("properties"); + assertNotNull(propertiesProperties); + assertEquals(2, propertiesProperties.size()); + assertLeafs(propertiesProperties, "key"); + assertMultiField(propertiesProperties, "value", "keyword"); + } + + { + ImmutableOpenMap> mappings = metaData.findMappings( + new String[]{"index1", "index2" , "index3"}, + new String[]{"doc"}, index -> field -> (index.equals("index3") && field.endsWith("keyword"))); + + assertIndexMappingsNoFields(mappings, "index1"); + assertIndexMappingsNoFields(mappings, "index2"); + ImmutableOpenMap index3 = mappings.get("index3"); + assertEquals(1, index3.size()); + MappingMetaData mappingMetaData = index3.get("doc"); + Map sourceAsMap = mappingMetaData.getSourceAsMap(); + assertEquals(3, sourceAsMap.size()); + assertTrue(sourceAsMap.containsKey("_routing")); + assertTrue(sourceAsMap.containsKey("_source")); + Map typeProperties = (Map) sourceAsMap.get("properties"); + assertNotNull(typeProperties); + assertEquals(1, typeProperties.size()); + Map properties = (Map) typeProperties.get("properties"); + assertNotNull(properties); + assertEquals(2, properties.size()); + assertTrue(properties.containsKey("type")); + Map propertiesProperties = (Map) properties.get("properties"); + assertNotNull(propertiesProperties); + assertEquals(2, propertiesProperties.size()); + Map key = (Map) propertiesProperties.get("key"); + assertEquals(1, key.size()); + Map keyProperties = (Map) key.get("properties"); + assertEquals(1, keyProperties.size()); + assertLeafs(keyProperties, "keyword"); + Map value = (Map) propertiesProperties.get("value"); + assertEquals(1, value.size()); + Map valueProperties = (Map) value.get("properties"); + assertEquals(1, valueProperties.size()); + assertLeafs(valueProperties, "keyword"); + } + + { + 
ImmutableOpenMap> mappings = metaData.findMappings( + new String[]{"index1", "index2" , "index3"}, + new String[]{"doc"}, index -> field -> (index.equals("index2"))); + + assertIndexMappingsNoFields(mappings, "index1"); + assertIndexMappingsNoFields(mappings, "index3"); + assertIndexMappingsNotFiltered(mappings, "index2"); + } + } + + @SuppressWarnings("unchecked") + private static void assertIndexMappingsNoFields(ImmutableOpenMap> mappings, + String index) { + ImmutableOpenMap indexMappings = mappings.get(index); + assertNotNull(indexMappings); + assertEquals(1, indexMappings.size()); + MappingMetaData docMapping = indexMappings.get("doc"); + assertNotNull(docMapping); + Map sourceAsMap = docMapping.getSourceAsMap(); + assertEquals(3, sourceAsMap.size()); + assertTrue(sourceAsMap.containsKey("_routing")); + assertTrue(sourceAsMap.containsKey("_source")); + Map typeProperties = (Map) sourceAsMap.get("properties"); + assertEquals(0, typeProperties.size()); + } + + @SuppressWarnings("unchecked") + private static void assertIndexMappingsNotFiltered(ImmutableOpenMap> mappings, + String index) { + ImmutableOpenMap indexMappings = mappings.get(index); + assertNotNull(indexMappings); + + assertEquals(1, indexMappings.size()); + MappingMetaData docMapping = indexMappings.get("doc"); + assertNotNull(docMapping); + + Map sourceAsMap = docMapping.getSourceAsMap(); + assertEquals(3, sourceAsMap.size()); + assertTrue(sourceAsMap.containsKey("_routing")); + assertTrue(sourceAsMap.containsKey("_source")); + + Map typeProperties = (Map) sourceAsMap.get("properties"); + assertEquals(7, typeProperties.size()); + assertTrue(typeProperties.containsKey("birth")); + assertTrue(typeProperties.containsKey("age")); + assertTrue(typeProperties.containsKey("ip")); + assertTrue(typeProperties.containsKey("suggest")); + + Map name = (Map) typeProperties.get("name"); + assertNotNull(name); + assertEquals(1, name.size()); + Map nameProperties = (Map) name.get("properties"); + 
assertNotNull(nameProperties); + assertEquals(2, nameProperties.size()); + assertLeafs(nameProperties, "first", "last"); + + Map address = (Map) typeProperties.get("address"); + assertNotNull(address); + assertEquals(2, address.size()); + assertTrue(address.containsKey("type")); + Map addressProperties = (Map) address.get("properties"); + assertNotNull(addressProperties); + assertEquals(3, addressProperties.size()); + assertLeafs(addressProperties, "street", "location", "area"); + + Map properties = (Map) typeProperties.get("properties"); + assertNotNull(properties); + assertEquals(2, properties.size()); + assertTrue(properties.containsKey("type")); + Map propertiesProperties = (Map) properties.get("properties"); + assertNotNull(propertiesProperties); + assertEquals(2, propertiesProperties.size()); + assertMultiField(propertiesProperties, "key", "keyword"); + assertMultiField(propertiesProperties, "value", "keyword"); + } + + @SuppressWarnings("unchecked") + public static void assertLeafs(Map properties, String... fields) { + for (String field : fields) { + assertTrue(properties.containsKey(field)); + @SuppressWarnings("unchecked") + Map fieldProp = (Map)properties.get(field); + assertNotNull(fieldProp); + assertFalse(fieldProp.containsKey("properties")); + assertFalse(fieldProp.containsKey("fields")); + } + } + + public static void assertMultiField(Map properties, String field, String... 
subFields) { + assertTrue(properties.containsKey(field)); + @SuppressWarnings("unchecked") + Map fieldProp = (Map)properties.get(field); + assertNotNull(fieldProp); + assertTrue(fieldProp.containsKey("fields")); + @SuppressWarnings("unchecked") + Map subFieldsDef = (Map) fieldProp.get("fields"); + assertLeafs(subFieldsDef, subFields); + } + + private static final String FIND_MAPPINGS_TEST_ITEM = "{\n" + + " \"doc\": {\n" + + " \"_routing\": {\n" + + " \"required\":true\n" + + " }," + + " \"_source\": {\n" + + " \"enabled\":false\n" + + " }," + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"properties\": {\n" + + " \"first\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"last\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"birth\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"age\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"ip\": {\n" + + " \"type\": \"ip\"\n" + + " },\n" + + " \"suggest\" : {\n" + + " \"type\": \"completion\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"street\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"location\": {\n" + + " \"type\": \"geo_point\"\n" + + " },\n" + + " \"area\": {\n" + + " \"type\": \"geo_shape\", \n" + + " \"tree\": \"quadtree\",\n" + + " \"precision\": \"1m\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"properties\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"key\" : {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\" : {\n" + + " \"type\" : \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"value\" : {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\" : {\n" + + " \"type\" : \"keyword\"\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; } diff --git a/core/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java 
b/core/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java index e1763fa6a5d..2e82397767f 100644 --- a/core/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/metadata/TemplateUpgradeServiceTests.java @@ -54,6 +54,8 @@ import java.util.Set; import java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.IntStream; import static java.util.Collections.emptyMap; import static org.elasticsearch.test.VersionUtils.randomVersion; @@ -61,9 +63,7 @@ import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; -import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doAnswer; @@ -83,16 +83,17 @@ public class TemplateUpgradeServiceTests extends ESTestCase { boolean shouldChange = randomBoolean(); MetaData metaData = randomMetaData( - IndexTemplateMetaData.builder("user_template").build(), - IndexTemplateMetaData.builder("removed_test_template").build(), - IndexTemplateMetaData.builder("changed_test_template").build() + IndexTemplateMetaData.builder("user_template").patterns(randomIndexPatterns()).build(), + IndexTemplateMetaData.builder("removed_test_template").patterns(randomIndexPatterns()).build(), + IndexTemplateMetaData.builder("changed_test_template").patterns(randomIndexPatterns()).build() ); TemplateUpgradeService service = new TemplateUpgradeService(Settings.EMPTY, null, clusterService, null, Arrays.asList( templates -> { if (shouldAdd) { - 
assertNull(templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template").build())); + assertNull(templates.put("added_test_template", + IndexTemplateMetaData.builder("added_test_template").patterns(randomIndexPatterns()).build())); } return templates; }, @@ -105,7 +106,7 @@ public class TemplateUpgradeServiceTests extends ESTestCase { templates -> { if (shouldChange) { assertNotNull(templates.put("changed_test_template", - IndexTemplateMetaData.builder("changed_test_template").order(10).build())); + IndexTemplateMetaData.builder("changed_test_template").patterns(randomIndexPatterns()).order(10).build())); } return templates; } @@ -234,9 +235,9 @@ public class TemplateUpgradeServiceTests extends ESTestCase { AtomicInteger updateInvocation = new AtomicInteger(); MetaData metaData = randomMetaData( - IndexTemplateMetaData.builder("user_template").build(), - IndexTemplateMetaData.builder("removed_test_template").build(), - IndexTemplateMetaData.builder("changed_test_template").build() + IndexTemplateMetaData.builder("user_template").patterns(randomIndexPatterns()).build(), + IndexTemplateMetaData.builder("removed_test_template").patterns(randomIndexPatterns()).build(), + IndexTemplateMetaData.builder("changed_test_template").patterns(randomIndexPatterns()).build() ); ThreadPool threadPool = mock(ThreadPool.class); @@ -390,4 +391,10 @@ public class TemplateUpgradeServiceTests extends ESTestCase { } return builder.build(); } + + List randomIndexPatterns() { + return IntStream.range(0, between(1, 10)) + .mapToObj(n -> randomUnicodeOfCodepointLengthBetween(1, 100)) + .collect(Collectors.toList()); + } } diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java index a892b2a2934..d64b4a66ee7 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java +++ 
b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterSerializationTests.java @@ -46,6 +46,7 @@ import org.elasticsearch.test.VersionUtils; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import static org.hamcrest.Matchers.equalTo; @@ -170,8 +171,8 @@ public class ClusterSerializationTests extends ESAllocationTestCase { public void testObjectReuseWhenApplyingClusterStateDiff() throws Exception { IndexMetaData indexMetaData = IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(10).numberOfReplicas(1).build(); - IndexTemplateMetaData indexTemplateMetaData - = IndexTemplateMetaData.builder("test-template").patterns(new ArrayList<>()).build(); + IndexTemplateMetaData indexTemplateMetaData = IndexTemplateMetaData.builder("test-template") + .patterns(Arrays.asList(generateRandomStringArray(10, 100, false, false))).build(); MetaData metaData = MetaData.builder().put(indexMetaData, true).put(indexTemplateMetaData).build(); RoutingTable routingTable = RoutingTable.builder().addAsNew(metaData.index("test")).build(); diff --git a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java index 65d780f3fd9..86c6d0e02eb 100644 --- a/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/serialization/ClusterStateToStringTests.java @@ -32,6 +32,8 @@ import org.elasticsearch.cluster.routing.allocation.AllocationService; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import java.util.Arrays; + import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.hamcrest.Matchers.containsString; @@ -40,7 +42,8 @@ public class ClusterStateToStringTests extends ESAllocationTestCase { 
public void testClusterStateSerialization() throws Exception { MetaData metaData = MetaData.builder() .put(IndexMetaData.builder("test_idx").settings(settings(Version.CURRENT)).numberOfShards(10).numberOfReplicas(1)) - .put(IndexTemplateMetaData.builder("test_template").build()) + .put(IndexTemplateMetaData.builder("test_template") + .patterns(Arrays.asList(generateRandomStringArray(10, 100, false,false))).build()) .build(); RoutingTable routingTable = RoutingTable.builder() diff --git a/core/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java b/core/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java index fe426fdd42a..dc835a8d3b0 100644 --- a/core/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java +++ b/core/src/test/java/org/elasticsearch/cluster/service/TaskExecutorTests.java @@ -66,7 +66,7 @@ public class TaskExecutorTests extends ESTestCase { @Before public void setUpExecutor() { - threadExecutor = EsExecutors.newSinglePrioritizing("test_thread", + threadExecutor = EsExecutors.newSinglePrioritizing(getClass().getName() + "/" + getTestName(), daemonThreadFactory(Settings.EMPTY, "test_thread"), threadPool.getThreadContext(), threadPool.scheduler()); } diff --git a/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferReferenceTests.java b/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferReferenceTests.java new file mode 100644 index 00000000000..9560fd40038 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/bytes/ByteBufferReferenceTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.common.bytes; + +import java.io.IOException; +import java.nio.ByteBuffer; + +public class ByteBufferReferenceTests extends AbstractBytesReferenceTestCase { + + private void initializeBytes(byte[] bytes) { + for (int i = 0 ; i < bytes.length; ++i) { + bytes[i] = (byte) i; + } + } + + @Override + protected BytesReference newBytesReference(int length) throws IOException { + return newBytesReferenceWithOffsetOfZero(length); + } + + @Override + protected BytesReference newBytesReferenceWithOffsetOfZero(int length) throws IOException { + byte[] bytes = new byte[length]; + initializeBytes(bytes); + return new ByteBufferReference(ByteBuffer.wrap(bytes)); + } +} diff --git a/core/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java b/core/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java index 3297e956e60..fff415de555 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java +++ b/core/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java @@ -18,10 +18,21 @@ */ package org.elasticsearch.common.geo; +import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.GeometryFactory; +import org.elasticsearch.common.geo.parsers.ShapeParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; +import org.locationtech.spatial4j.shape.Shape; +import 
org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT; @@ -35,4 +46,35 @@ abstract class BaseGeoParsingTestCase extends ESTestCase { public abstract void testParseMultiLineString() throws IOException; public abstract void testParsePolygon() throws IOException; public abstract void testParseMultiPolygon() throws IOException; + public abstract void testParseEnvelope() throws IOException; + public abstract void testParseGeometryCollection() throws IOException; + + protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { + XContentParser parser = createParser(builder); + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, expectedException); + } + + protected void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException { + XContentParser parser = createParser(geoJson); + parser.nextToken(); + ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); + } + + protected ShapeCollection shapeCollection(Shape... shapes) { + return new ShapeCollection<>(Arrays.asList(shapes), SPATIAL_CONTEXT); + } + + protected ShapeCollection shapeCollection(Geometry... 
geoms) { + List shapes = new ArrayList<>(geoms.length); + for (Geometry geom : geoms) { + shapes.add(jtsGeom(geom)); + } + return new ShapeCollection<>(shapes, SPATIAL_CONTEXT); + } + + protected JtsGeometry jtsGeom(Geometry geom) { + return new JtsGeometry(geom, SPATIAL_CONTEXT, false, false); + } + } diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java index 6624a91a6c2..c2e62277da7 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoDistanceTests.java @@ -81,29 +81,62 @@ public class GeoDistanceTests extends ESTestCase { assertThat(GeoUtils.rectangleContainsPoint(box, 0, -178), equalTo(false)); } - /** - * The old plane calculation in 1.x/2.x incorrectly computed the plane distance in decimal degrees. This test is - * well intended but bogus. todo: fix w/ new plane distance calculation - * note: plane distance error varies by latitude so the test will need to correctly estimate expected error - */ - @AwaitsFix(bugUrl = "old plane calculation incorrectly computed everything in degrees. 
fix this bogus test") - public void testArcDistanceVsPlaneInEllipsis() { - GeoPoint centre = new GeoPoint(48.8534100, 2.3488000); - GeoPoint northernPoint = new GeoPoint(48.8801108681, 2.35152032666); - GeoPoint westernPoint = new GeoPoint(48.85265, 2.308896); + private static double arcDistance(GeoPoint p1, GeoPoint p2) { + return GeoDistance.ARC.calculate(p1.lat(), p1.lon(), p2.lat(), p2.lon(), DistanceUnit.METERS); + } - // With GeoDistance.ARC both the northern and western points are within the 4km range - assertThat(GeoDistance.ARC.calculate(centre.lat(), centre.lon(), northernPoint.lat(), - northernPoint.lon(), DistanceUnit.KILOMETERS), lessThan(4D)); - assertThat(GeoDistance.ARC.calculate(centre.lat(), centre.lon(), westernPoint.lat(), - westernPoint.lon(), DistanceUnit.KILOMETERS), lessThan(4D)); + private static double planeDistance(GeoPoint p1, GeoPoint p2) { + return GeoDistance.PLANE.calculate(p1.lat(), p1.lon(), p2.lat(), p2.lon(), DistanceUnit.METERS); + } - // With GeoDistance.PLANE, only the northern point is within the 4km range, - // the western point is outside of the range due to the simple math it employs, - // meaning results will appear elliptical - assertThat(GeoDistance.PLANE.calculate(centre.lat(), centre.lon(), northernPoint.lat(), - northernPoint.lon(), DistanceUnit.KILOMETERS), lessThan(4D)); - assertThat(GeoDistance.PLANE.calculate(centre.lat(), centre.lon(), westernPoint.lat(), - westernPoint.lon(), DistanceUnit.KILOMETERS), greaterThan(4D)); + public void testArcDistanceVsPlane() { + // sameLongitude and sameLatitude are both 90 degrees away from basePoint along great circles + final GeoPoint basePoint = new GeoPoint(45, 90); + final GeoPoint sameLongitude = new GeoPoint(-45, 90); + final GeoPoint sameLatitude = new GeoPoint(45, -90); + + double sameLongitudeArcDistance = arcDistance(basePoint, sameLongitude); + double sameLatitudeArcDistance = arcDistance(basePoint, sameLatitude); + double sameLongitudePlaneDistance = 
planeDistance(basePoint, sameLongitude); + double sameLatitudePlaneDistance = planeDistance(basePoint, sameLatitude); + + // GeoDistance.PLANE measures the distance along a straight line in + // (lat, long) space so agrees with GeoDistance.ARC along a line of + // constant longitude but takes a longer route if there is east/west + // movement. + + assertThat("Arc and plane should agree on sameLongitude", + Math.abs(sameLongitudeArcDistance - sameLongitudePlaneDistance), lessThan(0.001)); + + assertThat("Arc and plane should disagree on sameLatitude (by >4000km)", + sameLatitudePlaneDistance - sameLatitudeArcDistance, greaterThan(4.0e6)); + + // GeoDistance.ARC calculates the great circle distance (on a sphere) so these should agree as they're both 90 degrees + assertThat("Arc distances should agree", Math.abs(sameLongitudeArcDistance - sameLatitudeArcDistance), lessThan(0.001)); + } + + public void testArcDistanceVsPlaneAccuracy() { + // These points only differ by a few degrees so the calculation methods + // should match more closely. Check that the deviation is small enough, + // but not too small. + + // The biggest deviations are away from the equator and the poles so pick a suitably troublesome latitude. 
+ GeoPoint basePoint = new GeoPoint(randomDoubleBetween(30.0, 60.0, true), randomDoubleBetween(-180.0, 180.0, true)); + GeoPoint sameLongitude = new GeoPoint(randomDoubleBetween(-90.0, 90.0, true), basePoint.lon()); + GeoPoint sameLatitude = new GeoPoint(basePoint.lat(), basePoint.lon() + randomDoubleBetween(4.0, 10.0, true)); + + double sameLongitudeArcDistance = arcDistance(basePoint, sameLongitude); + double sameLatitudeArcDistance = arcDistance(basePoint, sameLatitude); + double sameLongitudePlaneDistance = planeDistance(basePoint, sameLongitude); + double sameLatitudePlaneDistance = planeDistance(basePoint, sameLatitude); + + assertThat("Arc and plane should agree [" + basePoint + "] to [" + sameLongitude + "] (within 1cm)", + Math.abs(sameLongitudeArcDistance - sameLongitudePlaneDistance), lessThan(0.01)); + + assertThat("Arc and plane should very roughly agree [" + basePoint + "] to [" + sameLatitude + "]", + sameLatitudePlaneDistance - sameLatitudeArcDistance, lessThan(600.0)); + + assertThat("Arc and plane should disagree by some margin [" + basePoint + "] to [" + sameLatitude + "]", + sameLatitudePlaneDistance - sameLatitudeArcDistance, greaterThan(15.0)); } } diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index 32f384d96b1..fc987c7e3ca 100644 --- a/core/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.geo; import com.vividsolutions.jts.geom.Coordinate; -import com.vividsolutions.jts.geom.Geometry; import com.vividsolutions.jts.geom.LineString; import com.vividsolutions.jts.geom.LinearRing; import com.vividsolutions.jts.geom.MultiLineString; @@ -39,12 +38,10 @@ import org.locationtech.spatial4j.shape.Circle; import org.locationtech.spatial4j.shape.Rectangle; import 
org.locationtech.spatial4j.shape.Shape; import org.locationtech.spatial4j.shape.ShapeCollection; -import org.locationtech.spatial4j.shape.jts.JtsGeometry; import org.locationtech.spatial4j.shape.jts.JtsPoint; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT; @@ -159,6 +156,7 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase { assertGeometryEquals(jtsGeom(expectedLS), lineGeoJson); } + @Override public void testParseEnvelope() throws IOException { // test #1: envelope with expected coordinate order (TopLeft, BottomRight) XContentBuilder multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") @@ -1033,27 +1031,4 @@ public class GeoJsonShapeParserTests extends BaseGeoParsingTestCase { ElasticsearchGeoAssertions.assertMultiPolygon(shape); } - - private void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException { - XContentParser parser = createParser(geoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); - } - - private ShapeCollection shapeCollection(Shape... shapes) { - return new ShapeCollection<>(Arrays.asList(shapes), SPATIAL_CONTEXT); - } - - private ShapeCollection shapeCollection(Geometry... 
geoms) { - List shapes = new ArrayList<>(geoms.length); - for (Geometry geom : geoms) { - shapes.add(jtsGeom(geom)); - } - return new ShapeCollection<>(shapes, SPATIAL_CONTEXT); - } - - private JtsGeometry jtsGeom(Geometry geom) { - return new JtsGeometry(geom, SPATIAL_CONTEXT, false, false); - } - } diff --git a/core/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java b/core/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java new file mode 100644 index 00000000000..191ce702052 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/geo/GeoWKTShapeParserTests.java @@ -0,0 +1,255 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.geo; + +import com.vividsolutions.jts.geom.Coordinate; +import com.vividsolutions.jts.geom.LineString; +import com.vividsolutions.jts.geom.LinearRing; +import com.vividsolutions.jts.geom.MultiLineString; +import com.vividsolutions.jts.geom.Point; +import com.vividsolutions.jts.geom.Polygon; +import org.apache.lucene.geo.GeoTestUtil; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.geo.builders.CoordinatesBuilder; +import org.elasticsearch.common.geo.builders.EnvelopeBuilder; +import org.elasticsearch.common.geo.builders.GeometryCollectionBuilder; +import org.elasticsearch.common.geo.builders.LineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiLineStringBuilder; +import org.elasticsearch.common.geo.builders.MultiPointBuilder; +import org.elasticsearch.common.geo.builders.MultiPolygonBuilder; +import org.elasticsearch.common.geo.builders.PointBuilder; +import org.elasticsearch.common.geo.builders.PolygonBuilder; +import org.elasticsearch.common.geo.builders.ShapeBuilder; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.test.geo.RandomShapeGenerator; +import org.locationtech.spatial4j.exception.InvalidShapeException; +import org.locationtech.spatial4j.shape.Rectangle; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.jts.JtsPoint; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.common.geo.builders.ShapeBuilder.SPATIAL_CONTEXT; + +/** + * Tests for {@code GeoWKTShapeParser} + */ +public class GeoWKTShapeParserTests extends BaseGeoParsingTestCase { + + private static XContentBuilder toWKTContent(ShapeBuilder builder, boolean generateMalformed) + throws 
IOException { + String wkt = builder.toWKT(); + if (generateMalformed) { + // malformed - extra paren + // TODO generate more malformed WKT + wkt += GeoWKTParser.RPAREN; + } + if (randomBoolean()) { + // test comments + wkt = "# " + wkt + "\n" + wkt; + } + return XContentFactory.jsonBuilder().value(wkt); + } + + private void assertExpected(Shape expected, ShapeBuilder builder) throws IOException { + XContentBuilder xContentBuilder = toWKTContent(builder, false); + assertGeometryEquals(expected, xContentBuilder); + } + + private void assertMalformed(Shape expected, ShapeBuilder builder) throws IOException { + XContentBuilder xContentBuilder = toWKTContent(builder, true); + assertValidException(xContentBuilder, ElasticsearchParseException.class); + } + + @Override + public void testParsePoint() throws IOException { + GeoPoint p = RandomShapeGenerator.randomPoint(random()); + Coordinate c = new Coordinate(p.lon(), p.lat()); + Point expected = GEOMETRY_FACTORY.createPoint(c); + assertExpected(new JtsPoint(expected, SPATIAL_CONTEXT), new PointBuilder().coordinate(c)); + assertMalformed(new JtsPoint(expected, SPATIAL_CONTEXT), new PointBuilder().coordinate(c)); + } + + @Override + public void testParseMultiPoint() throws IOException { + int numPoints = randomIntBetween(2, 100); + List coordinates = new ArrayList<>(numPoints); + Shape[] shapes = new Shape[numPoints]; + GeoPoint p = new GeoPoint(); + for (int i = 0; i < numPoints; ++i) { + p.reset(GeoTestUtil.nextLatitude(), GeoTestUtil.nextLongitude()); + coordinates.add(new Coordinate(p.lon(), p.lat())); + shapes[i] = SPATIAL_CONTEXT.makePoint(p.lon(), p.lat()); + } + ShapeCollection expected = shapeCollection(shapes); + assertExpected(expected, new MultiPointBuilder(coordinates)); + assertMalformed(expected, new MultiPointBuilder(coordinates)); + } + + private List randomLineStringCoords() { + int numPoints = randomIntBetween(2, 100); + List coordinates = new ArrayList<>(numPoints); + GeoPoint p; + for (int i = 0; i < 
numPoints; ++i) { + p = RandomShapeGenerator.randomPointIn(random(), -90d, -90d, 90d, 90d); + coordinates.add(new Coordinate(p.lon(), p.lat())); + } + return coordinates; + } + + @Override + public void testParseLineString() throws IOException { + List coordinates = randomLineStringCoords(); + LineString expected = GEOMETRY_FACTORY.createLineString(coordinates.toArray(new Coordinate[coordinates.size()])); + assertExpected(jtsGeom(expected), new LineStringBuilder(coordinates)); + } + + @Override + public void testParseMultiLineString() throws IOException { + int numLineStrings = randomIntBetween(2, 8); + List lineStrings = new ArrayList<>(numLineStrings); + MultiLineStringBuilder builder = new MultiLineStringBuilder(); + for (int j = 0; j < numLineStrings; ++j) { + List lsc = randomLineStringCoords(); + Coordinate [] coords = lsc.toArray(new Coordinate[lsc.size()]); + lineStrings.add(GEOMETRY_FACTORY.createLineString(coords)); + builder.linestring(new LineStringBuilder(lsc)); + } + MultiLineString expected = GEOMETRY_FACTORY.createMultiLineString( + lineStrings.toArray(new LineString[lineStrings.size()])); + assertExpected(jtsGeom(expected), builder); + assertMalformed(jtsGeom(expected), builder); + } + + @Override + public void testParsePolygon() throws IOException { + PolygonBuilder builder = PolygonBuilder.class.cast( + RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.POLYGON)); + Coordinate[] coords = builder.coordinates()[0][0]; + LinearRing shell = GEOMETRY_FACTORY.createLinearRing(coords); + Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); + assertExpected(jtsGeom(expected), builder); + assertMalformed(jtsGeom(expected), builder); + } + + @Override + public void testParseMultiPolygon() throws IOException { + int numPolys = randomIntBetween(2, 8); + MultiPolygonBuilder builder = new MultiPolygonBuilder(); + PolygonBuilder pb; + Coordinate[] coordinates; + Polygon[] shapes = new Polygon[numPolys]; + LinearRing shell; + 
for (int i = 0; i < numPolys; ++i) { + pb = PolygonBuilder.class.cast(RandomShapeGenerator.createShape(random(), RandomShapeGenerator.ShapeType.POLYGON)); + builder.polygon(pb); + coordinates = pb.coordinates()[0][0]; + shell = GEOMETRY_FACTORY.createLinearRing(coordinates); + shapes[i] = GEOMETRY_FACTORY.createPolygon(shell, null); + } + Shape expected = shapeCollection(shapes); + assertExpected(expected, builder); + assertMalformed(expected, builder); + } + + public void testParsePolygonWithHole() throws IOException { + // add 3d point to test ISSUE #10501 + List shellCoordinates = new ArrayList<>(); + shellCoordinates.add(new Coordinate(100, 0, 15.0)); + shellCoordinates.add(new Coordinate(101, 0)); + shellCoordinates.add(new Coordinate(101, 1)); + shellCoordinates.add(new Coordinate(100, 1, 10.0)); + shellCoordinates.add(new Coordinate(100, 0)); + + List holeCoordinates = new ArrayList<>(); + holeCoordinates.add(new Coordinate(100.2, 0.2)); + holeCoordinates.add(new Coordinate(100.8, 0.2)); + holeCoordinates.add(new Coordinate(100.8, 0.8)); + holeCoordinates.add(new Coordinate(100.2, 0.8)); + holeCoordinates.add(new Coordinate(100.2, 0.2)); + + PolygonBuilder polygonWithHole = new PolygonBuilder(new CoordinatesBuilder().coordinates(shellCoordinates)); + polygonWithHole.hole(new LineStringBuilder(holeCoordinates)); + + LinearRing shell = GEOMETRY_FACTORY.createLinearRing( + shellCoordinates.toArray(new Coordinate[shellCoordinates.size()])); + LinearRing[] holes = new LinearRing[1]; + holes[0] = GEOMETRY_FACTORY.createLinearRing( + holeCoordinates.toArray(new Coordinate[holeCoordinates.size()])); + Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, holes); + + assertExpected(jtsGeom(expected), polygonWithHole); + assertMalformed(jtsGeom(expected), polygonWithHole); + } + + public void testParseSelfCrossingPolygon() throws IOException { + // test self crossing ccw poly not crossing dateline + List shellCoordinates = new ArrayList<>(); + 
shellCoordinates.add(new Coordinate(176, 15)); + shellCoordinates.add(new Coordinate(-177, 10)); + shellCoordinates.add(new Coordinate(-177, -10)); + shellCoordinates.add(new Coordinate(176, -15)); + shellCoordinates.add(new Coordinate(-177, 15)); + shellCoordinates.add(new Coordinate(172, 0)); + shellCoordinates.add(new Coordinate(176, 15)); + + PolygonBuilder poly = new PolygonBuilder(new CoordinatesBuilder().coordinates(shellCoordinates)); + XContentBuilder builder = XContentFactory.jsonBuilder().value(poly.toWKT()); + assertValidException(builder, InvalidShapeException.class); + } + + public void testMalformedWKT() throws IOException { + // malformed points in a polygon is a common typo + String malformedWKT = "POLYGON ((100, 5) (100, 10) (90, 10), (90, 5), (100, 5)"; + XContentBuilder builder = XContentFactory.jsonBuilder().value(malformedWKT); + assertValidException(builder, ElasticsearchParseException.class); + } + + @Override + public void testParseEnvelope() throws IOException { + org.apache.lucene.geo.Rectangle r = GeoTestUtil.nextBox(); + EnvelopeBuilder builder = new EnvelopeBuilder(new Coordinate(r.minLon, r.maxLat), new Coordinate(r.maxLon, r.minLat)); + Rectangle expected = SPATIAL_CONTEXT.makeRectangle(r.minLon, r.maxLon, r.minLat, r.maxLat); + assertExpected(expected, builder); + assertMalformed(expected, builder); + } + + public void testInvalidGeometryType() throws IOException { + XContentBuilder builder = XContentFactory.jsonBuilder().value("UnknownType (-1 -2)"); + assertValidException(builder, IllegalArgumentException.class); + } + + @Override + public void testParseGeometryCollection() throws IOException { + if (rarely()) { + // assert empty shape collection + GeometryCollectionBuilder builder = new GeometryCollectionBuilder(); + Shape[] expected = new Shape[0]; + assertEquals(shapeCollection(expected).isEmpty(), builder.build().isEmpty()); + } else { + GeometryCollectionBuilder gcb = RandomShapeGenerator.createGeometryCollection(random()); + 
assertExpected(gcb.build(), gcb); + } + } +} diff --git a/core/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/core/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java index 11d1e1f5735..c3b34b7c3ef 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java @@ -97,4 +97,14 @@ public class KeyStoreWrapperTests extends ESTestCase { keystore.decrypt(new char[0]); assertEquals(seed.toString(), keystore.getString(KeyStoreWrapper.SEED_SETTING.getKey()).toString()); } + + public void testIllegalSettingName() throws Exception { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> KeyStoreWrapper.validateSettingName("UpperCase")); + assertTrue(e.getMessage().contains("does not match the allowed setting name pattern")); + KeyStoreWrapper keystore = KeyStoreWrapper.create(new char[0]); + e = expectThrows(IllegalArgumentException.class, () -> keystore.setString("UpperCase", new char[0])); + assertTrue(e.getMessage().contains("does not match the allowed setting name pattern")); + e = expectThrows(IllegalArgumentException.class, () -> keystore.setFile("UpperCase", new byte[0])); + assertTrue(e.getMessage().contains("does not match the allowed setting name pattern")); + } } diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 04cd1717e7f..039de112fac 100644 --- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -486,6 +486,15 @@ public class SettingsTests extends ESTestCase { assertTrue(e.getMessage().contains("must be stored inside the Elasticsearch keystore")); } + public void testSecureSettingIllegalName() { + IllegalArgumentException e = 
expectThrows(IllegalArgumentException.class, () -> + SecureSetting.secureString("UpperCaseSetting", null)); + assertTrue(e.getMessage().contains("does not match the allowed setting name pattern")); + e = expectThrows(IllegalArgumentException.class, () -> + SecureSetting.secureFile("UpperCaseSetting", null)); + assertTrue(e.getMessage().contains("does not match the allowed setting name pattern")); + } + public void testGetAsArrayFailsOnDuplicates() { final IllegalStateException e = expectThrows(IllegalStateException.class, () -> Settings.builder() .put("foobar.0", "bar") diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java index 142123bb483..cc6152d9896 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsExecutorsTests.java @@ -44,8 +44,13 @@ public class EsExecutorsTests extends ESTestCase { return TimeUnit.values()[between(0, TimeUnit.values().length - 1)]; } + private String getName() { + return getClass().getName() + "/" + getTestName(); + } + public void testFixedForcedExecution() throws Exception { - EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"), threadContext); + EsThreadPoolExecutor executor = + EsExecutors.newFixed(getName(), 1, 1, EsExecutors.daemonThreadFactory("test"), threadContext); final CountDownLatch wait = new CountDownLatch(1); final CountDownLatch exec1Wait = new CountDownLatch(1); @@ -107,7 +112,8 @@ public class EsExecutorsTests extends ESTestCase { } public void testFixedRejected() throws Exception { - EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), 1, 1, EsExecutors.daemonThreadFactory("test"), threadContext); + EsThreadPoolExecutor executor = + EsExecutors.newFixed(getName(), 1, 1, EsExecutors.daemonThreadFactory("test"), 
threadContext); final CountDownLatch wait = new CountDownLatch(1); final CountDownLatch exec1Wait = new CountDownLatch(1); @@ -165,7 +171,8 @@ public class EsExecutorsTests extends ESTestCase { final int max = between(min + 1, 6); final ThreadBarrier barrier = new ThreadBarrier(max + 1); - ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"), threadContext); + ThreadPoolExecutor pool = + EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), randomTimeUnit(), EsExecutors.daemonThreadFactory("test"), threadContext); assertThat("Min property", pool.getCorePoolSize(), equalTo(min)); assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max)); @@ -201,7 +208,8 @@ public class EsExecutorsTests extends ESTestCase { final int max = between(min + 1, 6); final ThreadBarrier barrier = new ThreadBarrier(max + 1); - final ThreadPoolExecutor pool = EsExecutors.newScaling(getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"), threadContext); + final ThreadPoolExecutor pool = + EsExecutors.newScaling(getClass().getName() + "/" + getTestName(), min, max, between(1, 100), TimeUnit.MILLISECONDS, EsExecutors.daemonThreadFactory("test"), threadContext); assertThat("Min property", pool.getCorePoolSize(), equalTo(min)); assertThat("Max property", pool.getMaximumPoolSize(), equalTo(max)); @@ -241,7 +249,8 @@ public class EsExecutorsTests extends ESTestCase { int queue = between(0, 100); int actions = queue + pool; final CountDownLatch latch = new CountDownLatch(1); - EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); + EsThreadPoolExecutor executor = + EsExecutors.newFixed(getName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); try { for (int i = 0; i < actions; i++) { 
executor.execute(new Runnable() { @@ -272,7 +281,7 @@ public class EsExecutorsTests extends ESTestCase { assertFalse("Thread pool registering as terminated when it isn't", e.isExecutorShutdown()); String message = ExceptionsHelper.detailedMessage(e); assertThat(message, containsString("of dummy runnable")); - assertThat(message, containsString("on EsThreadPoolExecutor[testRejectionMessage")); + assertThat(message, containsString("on EsThreadPoolExecutor[name = " + getName())); assertThat(message, containsString("queue capacity = " + queue)); assertThat(message, containsString("[Running")); /* @@ -312,7 +321,7 @@ public class EsExecutorsTests extends ESTestCase { assertTrue("Thread pool not registering as terminated when it is", e.isExecutorShutdown()); String message = ExceptionsHelper.detailedMessage(e); assertThat(message, containsString("of dummy runnable")); - assertThat(message, containsString("on EsThreadPoolExecutor[" + getTestName())); + assertThat(message, containsString("on EsThreadPoolExecutor[name = " + getName())); assertThat(message, containsString("queue capacity = " + queue)); assertThat(message, containsString("[Terminated")); assertThat(message, containsString("active threads = 0")); @@ -330,7 +339,8 @@ public class EsExecutorsTests extends ESTestCase { threadContext.putHeader("foo", "bar"); final Integer one = new Integer(1); threadContext.putTransient("foo", one); - EsThreadPoolExecutor executor = EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); + EsThreadPoolExecutor executor = + EsExecutors.newFixed(getName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); try { executor.execute(() -> { try { @@ -360,7 +370,8 @@ public class EsExecutorsTests extends ESTestCase { int queue = between(0, 100); final CountDownLatch latch = new CountDownLatch(1); final CountDownLatch executed = new CountDownLatch(1); - EsThreadPoolExecutor executor = 
EsExecutors.newFixed(getTestName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); + EsThreadPoolExecutor executor = + EsExecutors.newFixed(getName(), pool, queue, EsExecutors.daemonThreadFactory("dummy"), threadContext); try { Runnable r = () -> { latch.countDown(); @@ -379,6 +390,6 @@ public class EsExecutorsTests extends ESTestCase { latch.countDown(); terminate(executor); } - } + } diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutorTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutorTests.java new file mode 100644 index 00000000000..9b9aa50bd16 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/EsThreadPoolExecutorTests.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.util.concurrent; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.threadpool.ThreadPool; + +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.hasToString; + +public class EsThreadPoolExecutorTests extends ESSingleNodeTestCase { + + @Override + protected Settings nodeSettings() { + return Settings.builder() + .put("node.name", "es-thread-pool-executor-tests") + .put("thread_pool.bulk.size", 1) + .put("thread_pool.bulk.queue_size", 0) + .put("thread_pool.search.size", 1) + .put("thread_pool.search.queue_size", 1) + .build(); + } + + public void testRejectedExecutionExceptionContainsNodeName() { + // we test a fixed and an auto-queue executor but not scaling since it does not reject + runThreadPoolExecutorTest(1, ThreadPool.Names.BULK); + runThreadPoolExecutorTest(2, ThreadPool.Names.SEARCH); + + } + + private void runThreadPoolExecutorTest(final int fill, final String executor) { + final CountDownLatch latch = new CountDownLatch(fill); + for (int i = 0; i < fill; i++) { + node().injector().getInstance(ThreadPool.class).executor(executor).execute(() -> { + try { + latch.await(); + } catch (final InterruptedException e) { + throw new RuntimeException(e); + } + }); + } + + final AtomicBoolean rejected = new AtomicBoolean(); + node().injector().getInstance(ThreadPool.class).executor(executor).execute(new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + + } + + @Override + public void onRejection(Exception e) { + rejected.set(true); + assertThat(e, hasToString(containsString("name = es-thread-pool-executor-tests/" + executor + ", "))); + } + + @Override + protected void doRun() throws Exception { + + } + }); + + latch.countDown(); + assertTrue(rejected.get()); + } + +} diff --git 
a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java index 17b43a079dc..1eacb4cb18c 100644 --- a/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java +++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/PrioritizedExecutorsTests.java @@ -45,6 +45,10 @@ public class PrioritizedExecutorsTests extends ESTestCase { private final ThreadContext holder = new ThreadContext(Settings.EMPTY); + private String getName() { + return getClass().getName() + "/" + getTestName(); + } + public void testPriorityQueue() throws Exception { PriorityBlockingQueue queue = new PriorityBlockingQueue<>(); List priorities = Arrays.asList(Priority.values()); @@ -65,7 +69,8 @@ public class PrioritizedExecutorsTests extends ESTestCase { } public void testSubmitPrioritizedExecutorWithRunnables() throws Exception { - ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null); + ExecutorService executor = + EsExecutors.newSinglePrioritizing(getName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null); List results = new ArrayList<>(8); CountDownLatch awaitingLatch = new CountDownLatch(1); CountDownLatch finishedLatch = new CountDownLatch(8); @@ -94,7 +99,8 @@ public class PrioritizedExecutorsTests extends ESTestCase { } public void testExecutePrioritizedExecutorWithRunnables() throws Exception { - ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null); + ExecutorService executor = + EsExecutors.newSinglePrioritizing(getName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null); List results = new ArrayList<>(8); CountDownLatch awaitingLatch = new CountDownLatch(1); CountDownLatch finishedLatch = new CountDownLatch(8); @@ -123,7 +129,8 @@ 
public class PrioritizedExecutorsTests extends ESTestCase { } public void testSubmitPrioritizedExecutorWithCallables() throws Exception { - ExecutorService executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null); + ExecutorService executor = + EsExecutors.newSinglePrioritizing(getName(), EsExecutors.daemonThreadFactory(getTestName()), holder, null); List results = new ArrayList<>(8); CountDownLatch awaitingLatch = new CountDownLatch(1); CountDownLatch finishedLatch = new CountDownLatch(8); @@ -182,7 +189,8 @@ public class PrioritizedExecutorsTests extends ESTestCase { public void testTimeout() throws Exception { ScheduledExecutorService timer = Executors.newSingleThreadScheduledExecutor(EsExecutors.daemonThreadFactory(getTestName())); - PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder, timer); + PrioritizedEsThreadPoolExecutor executor = + EsExecutors.newSinglePrioritizing(getName(), EsExecutors.daemonThreadFactory(getTestName()), holder, timer); final CountDownLatch invoked = new CountDownLatch(1); final CountDownLatch block = new CountDownLatch(1); executor.execute(new Runnable() { @@ -245,7 +253,8 @@ public class PrioritizedExecutorsTests extends ESTestCase { ThreadPool threadPool = new TestThreadPool("test"); final ScheduledThreadPoolExecutor timer = (ScheduledThreadPoolExecutor) threadPool.scheduler(); final AtomicBoolean timeoutCalled = new AtomicBoolean(); - PrioritizedEsThreadPoolExecutor executor = EsExecutors.newSinglePrioritizing(getTestName(), EsExecutors.daemonThreadFactory(getTestName()), holder, timer); + PrioritizedEsThreadPoolExecutor executor = + EsExecutors.newSinglePrioritizing(getName(), EsExecutors.daemonThreadFactory(getTestName()), holder, timer); final CountDownLatch invoked = new CountDownLatch(1); executor.execute(new Runnable() { @Override diff --git 
a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 853294de186..d40d558d20b 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -112,7 +112,8 @@ public class UnicastZenPingTests extends ESTestCase { threadPool = new TestThreadPool(getClass().getName()); final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory("[" + getClass().getName() + "]"); executorService = - EsExecutors.newScaling(getClass().getName(), 0, 2, 60, TimeUnit.SECONDS, threadFactory, threadPool.getThreadContext()); + EsExecutors.newScaling( + getClass().getName() + "/" + getTestName(), 0, 2, 60, TimeUnit.SECONDS, threadFactory, threadPool.getThreadContext()); closeables = new Stack<>(); } diff --git a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java index e7daa9a791d..cef3502a077 100644 --- a/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java +++ b/core/src/test/java/org/elasticsearch/gateway/GatewayMetaStateTests.java @@ -308,7 +308,8 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { Collections.emptyList(), Collections.singletonList( templates -> { - templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template").build()); + templates.put("added_test_template", IndexTemplateMetaData.builder("added_test_template") + .patterns(Arrays.asList(generateRandomStringArray(10, 100, false, false))).build()); return templates; } )); @@ -438,14 +439,17 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { Collections.emptyList(), Arrays.asList( indexTemplateMetaDatas -> { - indexTemplateMetaDatas.put("template1", IndexTemplateMetaData.builder("template1").settings( - 
Settings.builder().put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 20).build()).build()); + indexTemplateMetaDatas.put("template1", IndexTemplateMetaData.builder("template1") + .patterns(Arrays.asList(generateRandomStringArray(10, 100, false, false))) + .settings(Settings.builder().put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 20).build()) + .build()); return indexTemplateMetaDatas; }, indexTemplateMetaDatas -> { - indexTemplateMetaDatas.put("template2", IndexTemplateMetaData.builder("template2").settings( - Settings.builder().put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 10).build()).build()); + indexTemplateMetaDatas.put("template2", IndexTemplateMetaData.builder("template2") + .patterns(Arrays.asList(generateRandomStringArray(10, 100, false, false))) + .settings(Settings.builder().put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 10).build()).build()); return indexTemplateMetaDatas; } @@ -535,6 +539,7 @@ public class GatewayMetaStateTests extends ESAllocationTestCase { .settings(settings(Version.CURRENT) .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), randomIntBetween(0, 3)) .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), randomIntBetween(1, 5))) + .patterns(Arrays.asList(generateRandomStringArray(10, 100, false, false))) .build(); builder.put(templateMetaData); } diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index 50be2516f4e..f82f2c39f44 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.MapperPlugin; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; @@ -92,7 +93,7 @@ public class CodecTests extends ESTestCase { IndexSettings settings = IndexSettingsModule.newIndexSettings("_na", nodeSettings); SimilarityService similarityService = new SimilarityService(settings, null, Collections.emptyMap()); IndexAnalyzers indexAnalyzers = createTestAnalysis(settings, nodeSettings).indexAnalyzers; - MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap()); + MapperRegistry mapperRegistry = new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER); MapperService service = new MapperService(settings, indexAnalyzers, xContentRegistry(), similarityService, mapperRegistry, () -> null); return new CodecService(service, ESLoggerFactory.getLogger("test")); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 026a01a23c3..1b700b80086 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -105,7 +105,6 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.RootObjectMapper; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.seqno.SequenceNumbersService; @@ -2547,8 +2546,8 @@ public class InternalEngineTests extends EngineTestCase { threadPool, config.getIndexSettings(), null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), 
IndexSearcher.getDefaultQueryCachingPolicy(), false, translogConfig, TimeValue.timeValueMinutes(5), - config.getRefreshListeners(), null, config.getTranslogRecoveryRunner(), new NoneCircuitBreakerService()); - + config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(), + new NoneCircuitBreakerService()); try { InternalEngine internalEngine = new InternalEngine(brokenConfig); fail("translog belongs to a different engine"); @@ -2601,7 +2600,8 @@ public class InternalEngineTests extends EngineTestCase { threadPool, indexSettings, null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), false, config.getTranslogConfig(), TimeValue.timeValueMinutes(5), - config.getRefreshListeners(), null, config.getTranslogRecoveryRunner(), new NoneCircuitBreakerService()); + config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(), + new NoneCircuitBreakerService()); engine = new InternalEngine(newConfig); if (newConfig.getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { engine.recoverFromTranslog(); @@ -2631,7 +2631,8 @@ public class InternalEngineTests extends EngineTestCase { threadPool, config.getIndexSettings(), null, store, newMergePolicy(), config.getAnalyzer(), config.getSimilarity(), new CodecService(null, logger), config.getEventListener(), IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), true, config.getTranslogConfig(), TimeValue.timeValueMinutes(5), - config.getRefreshListeners(), null, config.getTranslogRecoveryRunner(), new NoneCircuitBreakerService()); + config.getExternalRefreshListener(), config.getInternalRefreshListener(), null, config.getTranslogRecoveryRunner(), + new NoneCircuitBreakerService()); engine = new InternalEngine(newConfig); if 
(newConfig.getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { engine.recoverFromTranslog(); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index c878a4767b9..72d6e8c4c2c 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.mapper.MapperRegistry; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.InternalSettingsPlugin; @@ -60,7 +61,8 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test", settings); MapperRegistry mapperRegistry = new MapperRegistry( Collections.singletonMap(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo")), - Collections.singletonMap(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser())); + Collections.singletonMap(ExternalMetadataMapper.CONTENT_TYPE, new ExternalMetadataMapper.TypeParser()), + MapperPlugin.NOOP_FIELD_FILTER); Supplier queryShardContext = () -> { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }, null); @@ -111,7 +113,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { mapperParsers.put(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo")); mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser()); mapperParsers.put(KeywordFieldMapper.CONTENT_TYPE, new 
KeywordFieldMapper.TypeParser()); - MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap()); + MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER); Supplier queryShardContext = () -> { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }, null); @@ -177,7 +179,7 @@ public class ExternalFieldMapperTests extends ESSingleNodeTestCase { mapperParsers.put(ExternalMapperPlugin.EXTERNAL, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "foo")); mapperParsers.put(ExternalMapperPlugin.EXTERNAL_BIS, new ExternalMapper.TypeParser(ExternalMapperPlugin.EXTERNAL, "bar")); mapperParsers.put(TextFieldMapper.CONTENT_TYPE, new TextFieldMapper.TypeParser()); - MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap()); + MapperRegistry mapperRegistry = new MapperRegistry(mapperParsers, Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER); Supplier queryShardContext = () -> { return indexService.newQueryShardContext(0, null, () -> { throw new UnsupportedOperationException(); }, null); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java b/core/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java new file mode 100644 index 00000000000..86587be951f --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/mapper/FieldFilterMapperPluginTests.java @@ -0,0 +1,326 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.get.GetIndexResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; +import org.elasticsearch.action.fieldcaps.FieldCapabilities; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; +import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.indices.IndicesModule; +import org.elasticsearch.plugins.MapperPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.junit.Before; + +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; + +import static org.elasticsearch.cluster.metadata.MetaDataTests.assertLeafs; +import static org.elasticsearch.cluster.metadata.MetaDataTests.assertMultiField; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; + +public class FieldFilterMapperPluginTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return 
Collections.singleton(FieldFilterPlugin.class); + } + + @Before + public void putMappings() { + assertAcked(client().admin().indices().prepareCreate("index1")); + assertAcked(client().admin().indices().prepareCreate("filtered")); + assertAcked(client().admin().indices().preparePutMapping("index1", "filtered") + .setType("doc").setSource(TEST_ITEM, XContentType.JSON)); + } + + public void testGetMappings() { + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings().get(); + assertExpectedMappings(getMappingsResponse.mappings()); + } + + public void testGetIndex() { + GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex() + .setFeatures(GetIndexRequest.Feature.MAPPINGS).get(); + assertExpectedMappings(getIndexResponse.mappings()); + } + + public void testGetFieldMappings() { + GetFieldMappingsResponse getFieldMappingsResponse = client().admin().indices().prepareGetFieldMappings().setFields("*").get(); + Map>> mappings = getFieldMappingsResponse.mappings(); + assertEquals(2, mappings.size()); + assertFieldMappings(mappings.get("index1"), ALL_FLAT_FIELDS); + assertFieldMappings(mappings.get("filtered"), FILTERED_FLAT_FIELDS); + //double check that submitting the filtered mappings to an unfiltered index leads to the same get field mappings output + //as the one coming from a filtered index with same mappings + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); + ImmutableOpenMap filtered = getMappingsResponse.getMappings().get("filtered"); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", filtered.get("doc").getSourceAsMap())); + GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("test").setFields("*").get(); + assertEquals(1, response.mappings().size()); + assertFieldMappings(response.mappings().get("test"), FILTERED_FLAT_FIELDS); + } + + public void testFieldCapabilities() { + 
FieldCapabilitiesResponse index1 = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("index1")).actionGet(); + assertFieldCaps(index1, ALL_FLAT_FIELDS); + FieldCapabilitiesResponse filtered = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("filtered")).actionGet(); + assertFieldCaps(filtered, FILTERED_FLAT_FIELDS); + //double check that submitting the filtered mappings to an unfiltered index leads to the same field_caps output + //as the one coming from a filtered index with same mappings + GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("filtered").get(); + ImmutableOpenMap filteredMapping = getMappingsResponse.getMappings().get("filtered"); + assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", filteredMapping.get("doc").getSourceAsMap())); + FieldCapabilitiesResponse test = client().fieldCaps(new FieldCapabilitiesRequest().fields("*").indices("test")).actionGet(); + assertFieldCaps(test, FILTERED_FLAT_FIELDS); + } + + private static void assertFieldCaps(FieldCapabilitiesResponse fieldCapabilitiesResponse, String[] expectedFields) { + Map> responseMap = fieldCapabilitiesResponse.get(); + Set builtInMetaDataFields = IndicesModule.getBuiltInMetaDataFields(); + for (String field : builtInMetaDataFields) { + Map remove = responseMap.remove(field); + assertNotNull(" expected field [" + field + "] not found", remove); + } + for (String field : expectedFields) { + Map remove = responseMap.remove(field); + assertNotNull(" expected field [" + field + "] not found", remove); + } + assertEquals("Some unexpected fields were returned: " + responseMap.keySet(), 0, responseMap.size()); + } + + private static void assertFieldMappings(Map> mappings, + String[] expectedFields) { + assertEquals(1, mappings.size()); + Map fields = new HashMap<>(mappings.get("doc")); + Set builtInMetaDataFields = IndicesModule.getBuiltInMetaDataFields(); + for (String field : 
builtInMetaDataFields) { + GetFieldMappingsResponse.FieldMappingMetaData fieldMappingMetaData = fields.remove(field); + assertNotNull(" expected field [" + field + "] not found", fieldMappingMetaData); + } + for (String field : expectedFields) { + GetFieldMappingsResponse.FieldMappingMetaData fieldMappingMetaData = fields.remove(field); + assertNotNull("expected field [" + field + "] not found", fieldMappingMetaData); + } + assertEquals("Some unexpected fields were returned: " + fields.keySet(), 0, fields.size()); + } + + private void assertExpectedMappings(ImmutableOpenMap> mappings) { + assertEquals(2, mappings.size()); + assertNotFiltered(mappings.get("index1")); + ImmutableOpenMap filtered = mappings.get("filtered"); + assertFiltered(filtered); + assertMappingsAreValid(filtered.get("doc").getSourceAsMap()); + } + + private void assertMappingsAreValid(Map sourceAsMap) { + //check that the returned filtered mappings are still valid mappings by submitting them and retrieving them back + assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", sourceAsMap)); + GetMappingsResponse testMappingsResponse = client().admin().indices().prepareGetMappings("test").get(); + assertEquals(1, testMappingsResponse.getMappings().size()); + //the mappings are returned unfiltered for this index, yet they are the same as the previous ones that were returned filtered + assertFiltered(testMappingsResponse.getMappings().get("test")); + } + + @SuppressWarnings("unchecked") + private static void assertFiltered(ImmutableOpenMap mappings) { + assertEquals(1, mappings.size()); + MappingMetaData mappingMetaData = mappings.get("doc"); + assertNotNull(mappingMetaData); + Map sourceAsMap = mappingMetaData.getSourceAsMap(); + assertEquals(4, sourceAsMap.size()); + assertTrue(sourceAsMap.containsKey("_meta")); + assertTrue(sourceAsMap.containsKey("_routing")); + assertTrue(sourceAsMap.containsKey("_source")); + Map typeProperties = (Map)sourceAsMap.get("properties"); + 
assertEquals(4, typeProperties.size()); + + Map name = (Map)typeProperties.get("name"); + assertEquals(1, name.size()); + Map nameProperties = (Map)name.get("properties"); + assertEquals(1, nameProperties.size()); + assertLeafs(nameProperties, "last_visible"); + + assertLeafs(typeProperties, "age_visible"); + + Map address = (Map) typeProperties.get("address"); + assertNotNull(address); + assertEquals(1, address.size()); + Map addressProperties = (Map) address.get("properties"); + assertNotNull(addressProperties); + assertEquals(1, addressProperties.size()); + assertLeafs(addressProperties, "area_visible"); + + Map properties = (Map) typeProperties.get("properties"); + assertNotNull(properties); + assertEquals(2, properties.size()); + assertEquals("nested", properties.get("type")); + Map propertiesProperties = (Map) properties.get("properties"); + assertNotNull(propertiesProperties); + assertEquals(2, propertiesProperties.size()); + assertLeafs(propertiesProperties, "key_visible"); + + Map value = (Map) propertiesProperties.get("value"); + assertNotNull(value); + assertEquals(1, value.size()); + Map valueProperties = (Map) value.get("properties"); + assertNotNull(valueProperties); + assertEquals(1, valueProperties.size()); + assertLeafs(valueProperties, "keyword_visible"); + } + + @SuppressWarnings("unchecked") + private static void assertNotFiltered(ImmutableOpenMap mappings) { + assertEquals(1, mappings.size()); + MappingMetaData mappingMetaData = mappings.get("doc"); + assertNotNull(mappingMetaData); + Map sourceAsMap = mappingMetaData.getSourceAsMap(); + assertEquals(4, sourceAsMap.size()); + assertTrue(sourceAsMap.containsKey("_meta")); + assertTrue(sourceAsMap.containsKey("_routing")); + assertTrue(sourceAsMap.containsKey("_source")); + Map typeProperties = (Map)sourceAsMap.get("properties"); + assertEquals(5, typeProperties.size()); + + Map name = (Map)typeProperties.get("name"); + assertEquals(1, name.size()); + Map nameProperties = 
(Map)name.get("properties"); + assertEquals(2, nameProperties.size()); + assertLeafs(nameProperties, "first", "last_visible"); + + assertLeafs(typeProperties, "birth", "age_visible"); + + Map address = (Map) typeProperties.get("address"); + assertNotNull(address); + assertEquals(1, address.size()); + Map addressProperties = (Map) address.get("properties"); + assertNotNull(addressProperties); + assertEquals(3, addressProperties.size()); + assertLeafs(addressProperties, "street", "location", "area_visible"); + + Map properties = (Map) typeProperties.get("properties"); + assertNotNull(properties); + assertEquals(2, properties.size()); + assertTrue(properties.containsKey("type")); + Map propertiesProperties = (Map) properties.get("properties"); + assertNotNull(propertiesProperties); + assertEquals(2, propertiesProperties.size()); + assertMultiField(propertiesProperties, "key_visible", "keyword"); + assertMultiField(propertiesProperties, "value", "keyword_visible"); + } + + public static class FieldFilterPlugin extends Plugin implements MapperPlugin { + + @Override + public Function> getFieldFilter() { + return index -> index.equals("filtered") ? 
field -> field.endsWith("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + } + } + + private static final String[] ALL_FLAT_FIELDS = new String[]{ + "name.first", "name.last_visible", "birth", "age_visible", "address.street", "address.location", "address.area_visible", + "properties.key_visible", "properties.key_visible.keyword", "properties.value", "properties.value.keyword_visible" + }; + + private static final String[] FILTERED_FLAT_FIELDS = new String[]{ + "name.last_visible", "age_visible", "address.area_visible", "properties.key_visible", "properties.value.keyword_visible" + }; + + private static final String TEST_ITEM = "{\n" + + " \"doc\": {\n" + + " \"_meta\": {\n" + + " \"version\":0.19\n" + + " }," + + " \"_routing\": {\n" + + " \"required\":true\n" + + " }," + + " \"_source\": {\n" + + " \"enabled\":false\n" + + " }," + + " \"properties\": {\n" + + " \"name\": {\n" + + " \"properties\": {\n" + + " \"first\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"last_visible\": {\n" + + " \"type\": \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"birth\": {\n" + + " \"type\": \"date\"\n" + + " },\n" + + " \"age_visible\": {\n" + + " \"type\": \"integer\"\n" + + " },\n" + + " \"address\": {\n" + + " \"type\": \"object\",\n" + + " \"properties\": {\n" + + " \"street\": {\n" + + " \"type\": \"keyword\"\n" + + " },\n" + + " \"location\": {\n" + + " \"type\": \"geo_point\"\n" + + " },\n" + + " \"area_visible\": {\n" + + " \"type\": \"geo_shape\", \n" + + " \"tree\": \"quadtree\",\n" + + " \"precision\": \"1m\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"properties\": {\n" + + " \"type\": \"nested\",\n" + + " \"properties\": {\n" + + " \"key_visible\" : {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword\" : {\n" + + " \"type\" : \"keyword\"\n" + + " }\n" + + " }\n" + + " },\n" + + " \"value\" : {\n" + + " \"type\": \"text\",\n" + + " \"fields\": {\n" + + " \"keyword_visible\" : {\n" + + " \"type\" : \"keyword\"\n" + + " }\n" + + 
" }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"; +} diff --git a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 844d6b0aaf9..f4e646030f2 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -58,10 +58,12 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.both; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThan; +import static org.hamcrest.Matchers.lessThanOrEqualTo; import static org.hamcrest.Matchers.not; public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestCase { @@ -374,15 +376,15 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC IndexShard newReplica = shards.addReplicaWithExistingPath(replica.shardPath(), replica.routingEntry().currentNodeId()); CountDownLatch recoveryStart = new CountDownLatch(1); - AtomicBoolean preparedForTranslog = new AtomicBoolean(false); + AtomicBoolean opsSent = new AtomicBoolean(false); final Future recoveryFuture = shards.asyncRecoverReplica(newReplica, (indexShard, node) -> { recoveryStart.countDown(); return new RecoveryTarget(indexShard, node, recoveryListener, l -> { }) { @Override - public void prepareForTranslogOperations(int totalTranslogOps) throws IOException { - preparedForTranslog.set(true); - super.prepareForTranslogOperations(totalTranslogOps); + public long indexTranslogOperations(List operations, int totalTranslogOps) throws IOException { + opsSent.set(true); + return 
super.indexTranslogOperations(operations, totalTranslogOps); } }; }); @@ -390,9 +392,10 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC recoveryStart.await(); // index some more - docs += shards.indexDocs(randomInt(5)); + final int indexedDuringRecovery = shards.indexDocs(randomInt(5)); + docs += indexedDuringRecovery; - assertFalse("recovery should wait on pending docs", preparedForTranslog.get()); + assertFalse("recovery should wait on pending docs", opsSent.get()); primaryEngineFactory.releaseLatchedIndexers(); pendingDocsDone.await(); @@ -401,7 +404,9 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC recoveryFuture.get(); assertThat(newReplica.recoveryState().getIndex().fileDetails(), empty()); - assertThat(newReplica.recoveryState().getTranslog().recoveredOperations(), equalTo(docs)); + assertThat(newReplica.recoveryState().getTranslog().recoveredOperations(), + // we don't know which of the inflight operations made it into the translog range we re-play + both(greaterThanOrEqualTo(docs-indexedDuringRecovery)).and(lessThanOrEqualTo(docs))); shards.assertAllEqual(docs); } finally { diff --git a/core/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java b/core/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java index 6cf3c7efeaf..15fdbe828b0 100644 --- a/core/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java @@ -29,8 +29,10 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.geo.RandomGeoGenerator; import java.io.IOException; +import java.util.function.DoubleSupplier; import static org.elasticsearch.common.geo.GeoHashUtils.stringEncode; +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.hamcrest.Matchers.is; public class GeoPointParsingTests extends ESTestCase { @@ 
-57,32 +59,14 @@ public class GeoPointParsingTests extends ESTestCase { } public void testEqualsHashCodeContract() { - // generate a random geopoint - final GeoPoint x = RandomGeoGenerator.randomPoint(random()); - final GeoPoint y = new GeoPoint(x.lat(), x.lon()); - final GeoPoint z = new GeoPoint(y.lat(), y.lon()); - // GeoPoint doesn't care about coordinate system bounds, this simply validates inequality - final GeoPoint a = new GeoPoint(x.lat() + randomIntBetween(1, 5), x.lon() + randomIntBetween(1, 5)); - - /** equality test */ - // reflexive - assertTrue(x.equals(x)); - // symmetry - assertTrue(x.equals(y)); - // transitivity - assertTrue(y.equals(z)); - assertTrue(x.equals(z)); - // inequality - assertFalse(x.equals(a)); - - /** hashCode test */ - // symmetry - assertTrue(x.hashCode() == y.hashCode()); - // transitivity - assertTrue(y.hashCode() == z.hashCode()); - assertTrue(x.hashCode() == z.hashCode()); - // inequality - assertFalse(x.hashCode() == a.hashCode()); + // GeoPoint doesn't care about coordinate system bounds, this simply validates equality and hashCode. 
+ final DoubleSupplier randomDelta = () -> randomValueOtherThan(0.0, () -> randomDoubleBetween(-1000000, 1000000, true)); + checkEqualsAndHashCode(RandomGeoGenerator.randomPoint(random()), GeoPoint::new, + pt -> new GeoPoint(pt.lat() + randomDelta.getAsDouble(), pt.lon())); + checkEqualsAndHashCode(RandomGeoGenerator.randomPoint(random()), GeoPoint::new, + pt -> new GeoPoint(pt.lat(), pt.lon() + randomDelta.getAsDouble())); + checkEqualsAndHashCode(RandomGeoGenerator.randomPoint(random()), GeoPoint::new, + pt -> new GeoPoint(pt.lat() + randomDelta.getAsDouble(), pt.lon() + randomDelta.getAsDouble())); } public void testGeoPointParsing() throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java b/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java index a03b506cba5..71faecfcea5 100644 --- a/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java +++ b/core/src/test/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncActionTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.transport.TransportService; import java.util.Collections; +import static org.elasticsearch.mock.orig.Mockito.never; import static org.elasticsearch.mock.orig.Mockito.when; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.mockito.Mockito.mock; @@ -86,9 +87,25 @@ public class GlobalCheckpointSyncActionTests extends ESTestCase { final ShardId shardId = new ShardId(index, id); when(indexShard.shardId()).thenReturn(shardId); + final Translog.Durability durability = randomFrom(Translog.Durability.ASYNC, Translog.Durability.REQUEST); + when(indexShard.getTranslogDurability()).thenReturn(durability); + final Translog translog = mock(Translog.class); when(indexShard.getTranslog()).thenReturn(translog); + final long globalCheckpoint = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Integer.MAX_VALUE); + final 
long lastSyncedGlobalCheckpoint; + if (randomBoolean() && globalCheckpoint != SequenceNumbers.NO_OPS_PERFORMED) { + lastSyncedGlobalCheckpoint = + randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Math.toIntExact(globalCheckpoint) - 1); + assert lastSyncedGlobalCheckpoint < globalCheckpoint; + } else { + lastSyncedGlobalCheckpoint = globalCheckpoint; + } + + when(indexShard.getGlobalCheckpoint()).thenReturn(globalCheckpoint); + when(translog.getLastSyncedGlobalCheckpoint()).thenReturn(lastSyncedGlobalCheckpoint); + final GlobalCheckpointSyncAction action = new GlobalCheckpointSyncAction( Settings.EMPTY, transportService, @@ -105,7 +122,11 @@ public class GlobalCheckpointSyncActionTests extends ESTestCase { action.shardOperationOnReplica(new GlobalCheckpointSyncAction.Request(indexShard.shardId()), indexShard); } - verify(translog).sync(); + if (durability == Translog.Durability.ASYNC || lastSyncedGlobalCheckpoint == globalCheckpoint) { + verify(translog, never()).sync(); + } else { + verify(translog).sync(); + } } } diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 96bcb9382ee..dc4294f30f5 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -92,6 +92,7 @@ import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SeqNoFieldMapper; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; @@ -1215,26 +1216,15 @@ public class IndexShardTests extends IndexShardTestCase { long refreshCount = shard.refreshStats().getTotal(); indexDoc(shard, "test", 
"test"); try (Engine.GetResult ignored = shard.get(new Engine.Get(true, "test", "test", - new Term(IdFieldMapper.NAME, Uid.encodeId("test"))))) { - assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount)); + new Term(IdFieldMapper.NAME, Uid.encodeId("test"))))) { + assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount+1)); } + indexDoc(shard, "test", "test"); + shard.writeIndexingBuffer(); + assertThat(shard.refreshStats().getTotal(), equalTo(refreshCount+2)); closeShards(shard); } - private ParsedDocument testParsedDocument(String id, String type, String routing, - ParseContext.Document document, BytesReference source, Mapping mappingUpdate) { - Field idField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE); - Field versionField = new NumericDocValuesField("_version", 0); - SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID(); - document.add(idField); - document.add(versionField); - document.add(seqID.seqNo); - document.add(seqID.seqNoDocValue); - document.add(seqID.primaryTerm); - return new ParsedDocument(versionField, seqID, id, type, routing, Arrays.asList(document), source, XContentType.JSON, - mappingUpdate); - } - public void testIndexingOperationsListeners() throws IOException { IndexShard shard = newStartedShard(true); indexDoc(shard, "test", "0", "{\"foo\" : \"bar\"}"); diff --git a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java index e3158a21853..125f45fd007 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/RefreshListenersTests.java @@ -55,7 +55,6 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; -import 
org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.Scheduler.Cancellable; @@ -123,7 +122,8 @@ public class RefreshListenersTests extends ESTestCase { EngineConfig config = new EngineConfig(EngineConfig.OpenMode.CREATE_INDEX_AND_TRANSLOG, shardId, allocationId, threadPool, indexSettings, null, store, newMergePolicy(), iwc.getAnalyzer(), iwc.getSimilarity(), new CodecService(null, logger), eventListener, IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), false, translogConfig, - TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), null, null, new NoneCircuitBreakerService()); + TimeValue.timeValueMinutes(5), Collections.singletonList(listeners), Collections.emptyList(), null, null, + new NoneCircuitBreakerService()); engine = new InternalEngine(config); listeners.setTranslog(engine.getTranslog()); } diff --git a/core/src/test/java/org/elasticsearch/index/translog/CountedBitSetTests.java b/core/src/test/java/org/elasticsearch/index/translog/CountedBitSetTests.java index 5174d1755be..b68607f02d6 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/CountedBitSetTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/CountedBitSetTests.java @@ -26,7 +26,9 @@ import java.util.List; import java.util.stream.Collectors; import java.util.stream.IntStream; +import static org.hamcrest.Matchers.allOf; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.lessThan; public class CountedBitSetTests extends ESTestCase { @@ -53,6 +55,7 @@ public class CountedBitSetTests extends ESTestCase { int numBits = (short) randomIntBetween(8, 4096); final CountedBitSet countedBitSet = new CountedBitSet((short) numBits); final List values = IntStream.range(0, numBits).boxed().collect(Collectors.toList()); + final long ramBytesUsedWithBitSet = 
countedBitSet.ramBytesUsed(); for (int i = 1; i < numBits; i++) { final int value = values.get(i); @@ -65,6 +68,7 @@ public class CountedBitSetTests extends ESTestCase { assertThat(countedBitSet.isInternalBitsetReleased(), equalTo(false)); assertThat(countedBitSet.length(), equalTo(numBits)); assertThat(countedBitSet.cardinality(), equalTo(i)); + assertThat(countedBitSet.ramBytesUsed(), equalTo(ramBytesUsedWithBitSet)); } // The missing piece to fill all bits. @@ -79,6 +83,7 @@ public class CountedBitSetTests extends ESTestCase { assertThat(countedBitSet.isInternalBitsetReleased(), equalTo(true)); assertThat(countedBitSet.length(), equalTo(numBits)); assertThat(countedBitSet.cardinality(), equalTo(numBits)); + assertThat(countedBitSet.ramBytesUsed(), allOf(equalTo(CountedBitSet.BASE_RAM_BYTES_USED), lessThan(ramBytesUsedWithBitSet))); } // Tests with released internal bitset. diff --git a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java index d98359cdd06..90fe9e8404b 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TruncateTranslogIT.java @@ -57,6 +57,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.engine.MockEngineSupport; +import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import java.io.IOException; @@ -84,6 +85,7 @@ public class TruncateTranslogIT extends ESIntegTestCase { return Arrays.asList(MockTransportService.TestPlugin.class, MockEngineFactoryPlugin.class); } + @TestLogging("org.elasticsearch.index.engine:TRACE,org.elasticsearch.index.translog:TRACE") public void testCorruptTranslogTruncation() throws Exception { internalCluster().startNodes(2, Settings.EMPTY); 
diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java index 298bb57c499..6c4cda7fc52 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesModuleTests.java @@ -19,23 +19,37 @@ package org.elasticsearch.indices; -import java.util.Arrays; -import java.util.Collections; -import java.util.List; -import java.util.Map; -import java.util.stream.Collectors; - import org.elasticsearch.index.mapper.FieldNamesFieldMapper; import org.elasticsearch.index.mapper.IdFieldMapper; +import org.elasticsearch.index.mapper.IndexFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MetadataFieldMapper; +import org.elasticsearch.index.mapper.ParentFieldMapper; +import org.elasticsearch.index.mapper.RoutingFieldMapper; +import org.elasticsearch.index.mapper.SeqNoFieldMapper; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.mapper.TypeFieldMapper; +import org.elasticsearch.index.mapper.UidFieldMapper; +import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.indices.mapper.MapperRegistry; import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; -import org.hamcrest.Matchers; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.greaterThan; public class IndicesModuleTests extends ESTestCase { @@ -59,7 +73,7 
@@ public class IndicesModuleTests extends ESTestCase { } } - List fakePlugins = Arrays.asList(new MapperPlugin() { + private final List fakePlugins = Arrays.asList(new MapperPlugin() { @Override public Map getMappers() { return Collections.singletonMap("fake-mapper", new FakeMapperParser()); @@ -70,17 +84,44 @@ public class IndicesModuleTests extends ESTestCase { } }); + private static String[] EXPECTED_METADATA_FIELDS = new String[]{UidFieldMapper.NAME, IdFieldMapper.NAME, RoutingFieldMapper.NAME, + IndexFieldMapper.NAME, SourceFieldMapper.NAME, TypeFieldMapper.NAME, VersionFieldMapper.NAME, ParentFieldMapper.NAME, + SeqNoFieldMapper.NAME, FieldNamesFieldMapper.NAME}; + public void testBuiltinMappers() { IndicesModule module = new IndicesModule(Collections.emptyList()); assertFalse(module.getMapperRegistry().getMapperParsers().isEmpty()); assertFalse(module.getMapperRegistry().getMetadataMapperParsers().isEmpty()); + Map metadataMapperParsers = module.getMapperRegistry().getMetadataMapperParsers(); + int i = 0; + for (String field : metadataMapperParsers.keySet()) { + assertEquals(EXPECTED_METADATA_FIELDS[i++], field); + } } public void testBuiltinWithPlugins() { + IndicesModule noPluginsModule = new IndicesModule(Collections.emptyList()); IndicesModule module = new IndicesModule(fakePlugins); MapperRegistry registry = module.getMapperRegistry(); - assertThat(registry.getMapperParsers().size(), Matchers.greaterThan(1)); - assertThat(registry.getMetadataMapperParsers().size(), Matchers.greaterThan(1)); + assertThat(registry.getMapperParsers().size(), greaterThan(noPluginsModule.getMapperRegistry().getMapperParsers().size())); + assertThat(registry.getMetadataMapperParsers().size(), + greaterThan(noPluginsModule.getMapperRegistry().getMetadataMapperParsers().size())); + Map metadataMapperParsers = module.getMapperRegistry().getMetadataMapperParsers(); + Iterator iterator = metadataMapperParsers.keySet().iterator(); + assertEquals(UidFieldMapper.NAME, 
iterator.next()); + String last = null; + while(iterator.hasNext()) { + last = iterator.next(); + } + assertEquals(FieldNamesFieldMapper.NAME, last); + } + + public void testGetBuiltInMetaDataFields() { + Set builtInMetaDataFields = IndicesModule.getBuiltInMetaDataFields(); + int i = 0; + for (String field : builtInMetaDataFields) { + assertEquals(EXPECTED_METADATA_FIELDS[i++], field); + } } public void testDuplicateBuiltinMapper() { @@ -92,7 +133,7 @@ public class IndicesModuleTests extends ESTestCase { }); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IndicesModule(plugins)); - assertThat(e.getMessage(), Matchers.containsString("already registered")); + assertThat(e.getMessage(), containsString("already registered")); } public void testDuplicateOtherPluginMapper() { @@ -105,7 +146,7 @@ public class IndicesModuleTests extends ESTestCase { List plugins = Arrays.asList(plugin, plugin); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IndicesModule(plugins)); - assertThat(e.getMessage(), Matchers.containsString("already registered")); + assertThat(e.getMessage(), containsString("already registered")); } public void testDuplicateBuiltinMetadataMapper() { @@ -117,7 +158,7 @@ public class IndicesModuleTests extends ESTestCase { }); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IndicesModule(plugins)); - assertThat(e.getMessage(), Matchers.containsString("already registered")); + assertThat(e.getMessage(), containsString("already registered")); } public void testDuplicateOtherPluginMetadataMapper() { @@ -130,7 +171,7 @@ public class IndicesModuleTests extends ESTestCase { List plugins = Arrays.asList(plugin, plugin); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IndicesModule(plugins)); - assertThat(e.getMessage(), Matchers.containsString("already registered")); + assertThat(e.getMessage(), containsString("already 
registered")); } public void testDuplicateFieldNamesMapper() { @@ -142,20 +183,102 @@ public class IndicesModuleTests extends ESTestCase { }); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IndicesModule(plugins)); - assertThat(e.getMessage(), Matchers.containsString("cannot contain metadata mapper [_field_names]")); + assertThat(e.getMessage(), containsString("cannot contain metadata mapper [_field_names]")); } public void testFieldNamesIsLast() { IndicesModule module = new IndicesModule(Collections.emptyList()); - List fieldNames = module.getMapperRegistry().getMetadataMapperParsers().keySet() - .stream().collect(Collectors.toList()); + List fieldNames = new ArrayList<>(module.getMapperRegistry().getMetadataMapperParsers().keySet()); assertEquals(FieldNamesFieldMapper.NAME, fieldNames.get(fieldNames.size() - 1)); } public void testFieldNamesIsLastWithPlugins() { IndicesModule module = new IndicesModule(fakePlugins); - List fieldNames = module.getMapperRegistry().getMetadataMapperParsers().keySet() - .stream().collect(Collectors.toList()); + List fieldNames = new ArrayList<>(module.getMapperRegistry().getMetadataMapperParsers().keySet()); assertEquals(FieldNamesFieldMapper.NAME, fieldNames.get(fieldNames.size() - 1)); } + + public void testGetFieldFilter() { + List mapperPlugins = Arrays.asList( + new MapperPlugin() { + @Override + public Function> getFieldFilter() { + return MapperPlugin.NOOP_FIELD_FILTER; + } + }, + new MapperPlugin() { + @Override + public Function> getFieldFilter() { + return index -> index.equals("hidden_index") ? field -> false : MapperPlugin.NOOP_FIELD_PREDICATE; + } + }, + new MapperPlugin() { + @Override + public Function> getFieldFilter() { + return index -> field -> field.equals("hidden_field") == false; + } + }, + new MapperPlugin() { + @Override + public Function> getFieldFilter() { + return index -> index.equals("filtered") ? 
field -> field.equals("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + } + }); + + IndicesModule indicesModule = new IndicesModule(mapperPlugins); + MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); + Function> fieldFilter = mapperRegistry.getFieldFilter(); + assertNotSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); + + assertFalse(fieldFilter.apply("hidden_index").test(randomAlphaOfLengthBetween(3, 5))); + assertTrue(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).test(randomAlphaOfLengthBetween(3, 5))); + + assertFalse(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).test("hidden_field")); + assertFalse(fieldFilter.apply("filtered").test(randomAlphaOfLengthBetween(3, 5))); + assertFalse(fieldFilter.apply("filtered").test("hidden_field")); + assertTrue(fieldFilter.apply("filtered").test("visible")); + assertFalse(fieldFilter.apply("hidden_index").test("visible")); + assertTrue(fieldFilter.apply(randomAlphaOfLengthBetween(3, 5)).test("visible")); + assertFalse(fieldFilter.apply("hidden_index").test("hidden_field")); + } + + public void testDefaultFieldFilterIsNoOp() { + int numPlugins = randomIntBetween(0, 10); + List mapperPlugins = new ArrayList<>(numPlugins); + for (int i = 0; i < numPlugins; i++) { + mapperPlugins.add(new MapperPlugin() {}); + } + IndicesModule indicesModule = new IndicesModule(mapperPlugins); + Function> fieldFilter = indicesModule.getMapperRegistry().getFieldFilter(); + assertSame(MapperPlugin.NOOP_FIELD_FILTER, fieldFilter); + } + + public void testNoOpFieldPredicate() { + List mapperPlugins = Arrays.asList( + new MapperPlugin() { + @Override + public Function> getFieldFilter() { + return MapperPlugin.NOOP_FIELD_FILTER; + } + }, + new MapperPlugin() { + @Override + public Function> getFieldFilter() { + return index -> index.equals("hidden_index") ? 
field -> false : MapperPlugin.NOOP_FIELD_PREDICATE; + } + }, + new MapperPlugin() { + @Override + public Function> getFieldFilter() { + return index -> index.equals("filtered") ? field -> field.equals("visible") : MapperPlugin.NOOP_FIELD_PREDICATE; + } + }); + + IndicesModule indicesModule = new IndicesModule(mapperPlugins); + MapperRegistry mapperRegistry = indicesModule.getMapperRegistry(); + Function> fieldFilter = mapperRegistry.getFieldFilter(); + assertSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply(randomAlphaOfLengthBetween(3, 7))); + assertNotSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("hidden_index")); + assertNotSame(MapperPlugin.NOOP_FIELD_PREDICATE, fieldFilter.apply("filtered")); + } } diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java index 267253ff12a..01f1c3dab9c 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesServiceTests.java @@ -110,7 +110,6 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { } } - @Override protected boolean resetNodeAfterTest() { return true; @@ -431,4 +430,12 @@ public class IndicesServiceTests extends ESSingleNodeTestCase { assertThat("index not defined", indexStats.containsKey(index), equalTo(true)); assertThat("unexpected shard stats", indexStats.get(index), equalTo(shardStats)); } + + public void testIsMetaDataField() { + IndicesService indicesService = getIndicesService(); + assertFalse(indicesService.isMetaDataField(randomAlphaOfLengthBetween(10, 15))); + for (String builtIn : IndicesModule.getBuiltInMetaDataFields()) { + assertTrue(indicesService.isMetaDataField(builtIn)); + } + } } diff --git a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java index 
f65250a5666..a58e9abbd47 100644 --- a/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/mapping/UpdateMappingIntegrationIT.java @@ -36,11 +36,9 @@ import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; -import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.InternalSettingsPlugin; import org.hamcrest.Matchers; -import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -107,7 +105,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { } } - public void testUpdateMappingWithoutType() throws Exception { + public void testUpdateMappingWithoutType() { client().admin().indices().prepareCreate("test") .setSettings( Settings.builder() @@ -128,7 +126,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { equalTo("{\"doc\":{\"properties\":{\"body\":{\"type\":\"text\"},\"date\":{\"type\":\"integer\"}}}}")); } - public void testUpdateMappingWithoutTypeMultiObjects() throws Exception { + public void testUpdateMappingWithoutTypeMultiObjects() { client().admin().indices().prepareCreate("test") .setSettings( Settings.builder() @@ -148,7 +146,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { equalTo("{\"doc\":{\"properties\":{\"date\":{\"type\":\"integer\"}}}}")); } - public void testUpdateMappingWithConflicts() throws Exception { + public void testUpdateMappingWithConflicts() { client().admin().indices().prepareCreate("test") .setSettings( Settings.builder() @@ -167,7 +165,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { } } - public void testUpdateMappingWithNormsConflicts() throws Exception { + public void testUpdateMappingWithNormsConflicts() { 
client().admin().indices().prepareCreate("test") .addMapping("type", "{\"type\":{\"properties\":{\"body\":{\"type\":\"text\", \"norms\": false }}}}", XContentType.JSON) .execute().actionGet(); @@ -184,7 +182,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { /* Second regression test for https://github.com/elastic/elasticsearch/issues/3381 */ - public void testUpdateMappingNoChanges() throws Exception { + public void testUpdateMappingNoChanges() { client().admin().indices().prepareCreate("test") .setSettings( Settings.builder() @@ -251,7 +249,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { getResponse = client().admin().indices().prepareGetMappings("test").addTypes(MapperService.DEFAULT_MAPPING).get(); defaultMapping = getResponse.getMappings().get("test").get(MapperService.DEFAULT_MAPPING).sourceAsMap(); Map fieldSettings = (Map) ((Map) defaultMapping.get("properties")).get("f"); - assertThat(fieldSettings, hasEntry("type", (Object) "keyword")); + assertThat(fieldSettings, hasEntry("type", "keyword")); // but we still validate the _default_ type logger.info("Confirming _default_ mappings validation"); @@ -276,40 +274,36 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { } for (int j = 0; j < threads.length; j++) { - threads[j] = new Thread(new Runnable() { - @SuppressWarnings("unchecked") - @Override - public void run() { - try { - barrier.await(); + threads[j] = new Thread(() -> { + try { + barrier.await(); - for (int i = 0; i < 100; i++) { - if (stop.get()) { - return; - } - - Client client1 = clientArray.get(i % clientArray.size()); - Client client2 = clientArray.get((i + 1) % clientArray.size()); - String indexName = i % 2 == 0 ? 
"test2" : "test1"; - String typeName = "type"; - String fieldName = Thread.currentThread().getName() + "_" + i; - - PutMappingResponse response = client1.admin().indices().preparePutMapping(indexName).setType(typeName).setSource( - JsonXContent.contentBuilder().startObject().startObject(typeName) - .startObject("properties").startObject(fieldName).field("type", "text").endObject().endObject() - .endObject().endObject() - ).get(); - - assertThat(response.isAcknowledged(), equalTo(true)); - GetMappingsResponse getMappingResponse = client2.admin().indices().prepareGetMappings(indexName).get(); - ImmutableOpenMap mappings = getMappingResponse.getMappings().get(indexName); - assertThat(mappings.containsKey(typeName), equalTo(true)); - assertThat(((Map) mappings.get(typeName).getSourceAsMap().get("properties")).keySet(), Matchers.hasItem(fieldName)); + for (int i = 0; i < 100; i++) { + if (stop.get()) { + return; } - } catch (Exception e) { - threadException.set(e); - stop.set(true); + + Client client1 = clientArray.get(i % clientArray.size()); + Client client2 = clientArray.get((i + 1) % clientArray.size()); + String indexName = i % 2 == 0 ? 
"test2" : "test1"; + String typeName = "type"; + String fieldName = Thread.currentThread().getName() + "_" + i; + + PutMappingResponse response = client1.admin().indices().preparePutMapping(indexName).setType(typeName).setSource( + JsonXContent.contentBuilder().startObject().startObject(typeName) + .startObject("properties").startObject(fieldName).field("type", "text").endObject().endObject() + .endObject().endObject() + ).get(); + + assertThat(response.isAcknowledged(), equalTo(true)); + GetMappingsResponse getMappingResponse = client2.admin().indices().prepareGetMappings(indexName).get(); + ImmutableOpenMap mappings = getMappingResponse.getMappings().get(indexName); + assertThat(mappings.containsKey(typeName), equalTo(true)); + assertThat(((Map) mappings.get(typeName).getSourceAsMap().get("properties")).keySet(), Matchers.hasItem(fieldName)); } + } catch (Exception e) { + threadException.set(e); + stop.set(true); } }); @@ -325,7 +319,7 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { } - public void testPutMappingsWithBlocks() throws Exception { + public void testPutMappingsWithBlocks() { createIndex("test"); ensureGreen(); @@ -350,7 +344,8 @@ public class UpdateMappingIntegrationIT extends ESIntegTestCase { } } - public void testUpdateMappingOnAllTypes() throws IOException { + @SuppressWarnings("unchecked") + public void testUpdateMappingOnAllTypes() { assertTrue("remove this multi type test", Version.CURRENT.before(Version.fromString("7.0.0"))); assertAcked(prepareCreate("index") .setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)) diff --git a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java index 993cc845064..cf5f24d2a6e 100644 --- a/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java +++ 
b/core/src/test/java/org/elasticsearch/indices/recovery/RecoverySourceHandlerTests.java @@ -70,15 +70,18 @@ import org.elasticsearch.test.CorruptionUtils; import org.elasticsearch.test.DummyShardLock; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.IndexSettingsModule; +import org.mockito.ArgumentCaptor; import java.io.IOException; import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.Comparator; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Supplier; +import java.util.stream.Collectors; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; @@ -88,6 +91,7 @@ import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class RecoverySourceHandlerTests extends ESTestCase { @@ -181,29 +185,70 @@ public class RecoverySourceHandlerTests extends ESTestCase { operations.add(new Translog.Index(index, new Engine.IndexResult(1, i - initialNumberOfDocs, true))); } operations.add(null); - final long startingSeqNo = randomBoolean() ? 
SequenceNumbers.UNASSIGNED_SEQ_NO : randomIntBetween(0, 16); - RecoverySourceHandler.SendSnapshotResult result = handler.sendSnapshot(startingSeqNo, new Translog.Snapshot() { - @Override - public void close() { + final long startingSeqNo = randomIntBetween(0, numberOfDocsWithValidSequenceNumbers - 1); + final long requiredStartingSeqNo = randomIntBetween((int) startingSeqNo, numberOfDocsWithValidSequenceNumbers - 1); + final long endingSeqNo = randomIntBetween((int) requiredStartingSeqNo - 1, numberOfDocsWithValidSequenceNumbers - 1); + RecoverySourceHandler.SendSnapshotResult result = handler.sendSnapshot(startingSeqNo, requiredStartingSeqNo, + endingSeqNo, new Translog.Snapshot() { + @Override + public void close() { - } + } - private int counter = 0; + private int counter = 0; - @Override - public int totalOperations() { - return operations.size() - 1; - } + @Override + public int totalOperations() { + return operations.size() - 1; + } - @Override - public Translog.Operation next() throws IOException { - return operations.get(counter++); - } - }); - if (startingSeqNo == SequenceNumbers.UNASSIGNED_SEQ_NO) { - assertThat(result.totalOperations, equalTo(initialNumberOfDocs + numberOfDocsWithValidSequenceNumbers)); - } else { - assertThat(result.totalOperations, equalTo(Math.toIntExact(numberOfDocsWithValidSequenceNumbers - startingSeqNo))); + @Override + public Translog.Operation next() throws IOException { + return operations.get(counter++); + } + }); + final int expectedOps = (int) (endingSeqNo - startingSeqNo + 1); + assertThat(result.totalOperations, equalTo(expectedOps)); + final ArgumentCaptor shippedOpsCaptor = ArgumentCaptor.forClass(List.class); + verify(recoveryTarget).indexTranslogOperations(shippedOpsCaptor.capture(), ArgumentCaptor.forClass(Integer.class).capture()); + List shippedOps = new ArrayList<>(); + for (List list: shippedOpsCaptor.getAllValues()) { + shippedOps.addAll(list); + } + 
shippedOps.sort(Comparator.comparing(Translog.Operation::seqNo)); + assertThat(shippedOps.size(), equalTo(expectedOps)); + for (int i = 0; i < shippedOps.size(); i++) { + assertThat(shippedOps.get(i), equalTo(operations.get(i + (int) startingSeqNo + initialNumberOfDocs))); + } + if (endingSeqNo >= requiredStartingSeqNo + 1) { + // check that missing ops blows up + List requiredOps = operations.subList(0, operations.size() - 1).stream() // remove last null marker + .filter(o -> o.seqNo() >= requiredStartingSeqNo && o.seqNo() <= endingSeqNo).collect(Collectors.toList()); + List opsToSkip = randomSubsetOf(randomIntBetween(1, requiredOps.size()), requiredOps); + expectThrows(IllegalStateException.class, () -> + handler.sendSnapshot(startingSeqNo, requiredStartingSeqNo, + endingSeqNo, new Translog.Snapshot() { + @Override + public void close() { + + } + + private int counter = 0; + + @Override + public int totalOperations() { + return operations.size() - 1 - opsToSkip.size(); + } + + @Override + public Translog.Operation next() throws IOException { + Translog.Operation op; + do { + op = operations.get(counter++); + } while (op != null && opsToSkip.contains(op)); + return op; + } + })); } } @@ -383,7 +428,7 @@ public class RecoverySourceHandlerTests extends ESTestCase { } @Override - long phase2(long startingSeqNo, Translog.Snapshot snapshot) throws IOException { + long phase2(long startingSeqNo, long requiredSeqNoRangeStart, long endingSeqNo, Translog.Snapshot snapshot) throws IOException { phase2Called.set(true); return SequenceNumbers.UNASSIGNED_SEQ_NO; } diff --git a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java index 7ff0725449e..51c073c607e 100644 --- a/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java +++ b/core/src/test/java/org/elasticsearch/indices/settings/UpdateSettingsIT.java @@ -241,10 +241,45 @@ public class UpdateSettingsIT 
extends ESIntegTestCase { .actionGet(); } } + public void testResetDefaultWithWildcard() { + createIndex("test"); + + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings( + Settings.builder() + .put("index.refresh_interval", -1)) + .execute() + .actionGet(); + IndexMetaData indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertEquals(indexMetaData.getSettings().get("index.refresh_interval"), "-1"); + for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { + IndexService indexService = service.indexService(resolveIndex("test")); + if (indexService != null) { + assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), -1); + } + } + client() + .admin() + .indices() + .prepareUpdateSettings("test") + .setSettings(Settings.builder().putNull("index.ref*")) + .execute() + .actionGet(); + indexMetaData = client().admin().cluster().prepareState().execute().actionGet().getState().metaData().index("test"); + assertNull(indexMetaData.getSettings().get("index.refresh_interval")); + for (IndicesService service : internalCluster().getInstances(IndicesService.class)) { + IndexService indexService = service.indexService(resolveIndex("test")); + if (indexService != null) { + assertEquals(indexService.getIndexSettings().getRefreshInterval().millis(), 1000); + } + } + } public void testResetDefault() { createIndex("test"); - client() .admin() .indices() diff --git a/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java b/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java index f0808f35741..02191bc22fa 100644 --- a/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java +++ b/core/src/test/java/org/elasticsearch/indices/state/OpenCloseIndexIT.java @@ -298,7 +298,7 @@ public class OpenCloseIndexIT extends ESIntegTestCase { assertIndexIsOpened("test1", "test2"); } - public void 
testOpenWaitingForActiveShardsFailed() { + public void testOpenWaitingForActiveShardsFailed() throws Exception { Client client = client(); Settings settings = Settings.builder() .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) @@ -308,8 +308,10 @@ public class OpenCloseIndexIT extends ESIntegTestCase { assertAcked(client.admin().indices().prepareClose("test").get()); OpenIndexResponse response = client.admin().indices().prepareOpen("test").setTimeout("100ms").setWaitForActiveShards(2).get(); - assertAcked(response); assertThat(response.isShardsAcknowledged(), equalTo(false)); + assertBusy(() -> assertThat(client.admin().cluster().prepareState().get().getState().metaData().index("test").getState(), + equalTo(IndexMetaData.State.OPEN))); + ensureGreen("test"); } private void assertIndexIsOpened(String... indices) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java index 02b3632d2e8..2d9f462d862 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/EquivalenceIT.java @@ -41,6 +41,8 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregatorFactory; import org.elasticsearch.search.aggregations.metrics.sum.Sum; import org.elasticsearch.test.ESIntegTestCase; +import org.junit.After; +import org.junit.Before; import java.util.ArrayList; import java.util.Collection; @@ -90,6 +92,21 @@ public class EquivalenceIT extends ESIntegTestCase { } } + @Before + private void setupMaxBuckets() { + // disables the max bucket limit for this test + client().admin().cluster().prepareUpdateSettings() + .setTransientSettings(Collections.singletonMap("search.max_buckets", Integer.MAX_VALUE)) + .get(); + } + + @After + private void cleanupMaxBuckets() { + 
client().admin().cluster().prepareUpdateSettings() + .setTransientSettings(Collections.singletonMap("search.max_buckets", null)) + .get(); + } + // Make sure that unordered, reversed, disjoint and/or overlapping ranges are supported // Duel with filters public void testRandomRanges() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java index 73ebd0089b0..f359a3307bd 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/composite/CompositeAggregatorTests.java @@ -1048,7 +1048,7 @@ public class CompositeAggregatorTests extends AggregatorTestCase { if (reduced) { composite = searchAndReduce(indexSearcher, query, aggregationBuilder, FIELD_TYPES); } else { - composite = search(indexSearcher, query, aggregationBuilder, indexSettings, FIELD_TYPES); + composite = search(indexSearcher, query, aggregationBuilder, FIELD_TYPES); } verify.accept(composite); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java index 9d3374630a9..9b5bc7541f2 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregatorTests.java @@ -31,6 +31,7 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilders; import 
org.elasticsearch.search.aggregations.AggregatorTestCase; import java.io.IOException; @@ -39,6 +40,8 @@ import java.util.Collections; import java.util.List; import java.util.function.Consumer; +import static org.elasticsearch.search.aggregations.MultiBucketConsumerService.TooManyBucketsException; + public class DateHistogramAggregatorTests extends AggregatorTestCase { private static final String DATE_FIELD = "date"; @@ -335,28 +338,82 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase { ); } + public void testMaxBucket() throws IOException { + Query query = new MatchAllDocsQuery(); + List timestamps = Arrays.asList( + "2010-01-01T00:00:00.000Z", + "2011-01-01T00:00:00.000Z", + "2017-01-01T00:00:00.000Z" + ); + + TooManyBucketsException exc = expectThrows(TooManyBucketsException.class, () -> testSearchCase(query, timestamps, + aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), + histogram -> {}, 2)); + + exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD), + histogram -> {}, 2)); + + exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + aggregation -> aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)).field(DATE_FIELD).minDocCount(0L), + histogram -> {}, 100)); + + exc = expectThrows(TooManyBucketsException.class, () -> testSearchAndReduceCase(query, timestamps, + aggregation -> + aggregation.dateHistogramInterval(DateHistogramInterval.seconds(5)) + .field(DATE_FIELD) + .subAggregation( + AggregationBuilders.dateHistogram("1") + .dateHistogramInterval(DateHistogramInterval.seconds(5)) + .field(DATE_FIELD) + ), + histogram -> {}, 5)); + } + private void testSearchCase(Query query, List dataset, Consumer configure, Consumer verify) throws IOException { - executeTestCase(false, query, 
dataset, configure, verify); + testSearchCase(query, dataset, configure, verify, 10000); + } + + private void testSearchCase(Query query, List dataset, + Consumer configure, + Consumer verify, + int maxBucket) throws IOException { + executeTestCase(false, query, dataset, configure, verify, maxBucket); } private void testSearchAndReduceCase(Query query, List dataset, Consumer configure, Consumer verify) throws IOException { - executeTestCase(true, query, dataset, configure, verify); + testSearchAndReduceCase(query, dataset, configure, verify, 1000); + } + + private void testSearchAndReduceCase(Query query, List dataset, + Consumer configure, + Consumer verify, + int maxBucket) throws IOException { + executeTestCase(true, query, dataset, configure, verify, maxBucket); } private void testBothCases(Query query, List dataset, Consumer configure, Consumer verify) throws IOException { - testSearchCase(query, dataset, configure, verify); - testSearchAndReduceCase(query, dataset, configure, verify); + testBothCases(query, dataset, configure, verify, 10000); + } + + private void testBothCases(Query query, List dataset, + Consumer configure, + Consumer verify, + int maxBucket) throws IOException { + testSearchCase(query, dataset, configure, verify, maxBucket); + testSearchAndReduceCase(query, dataset, configure, verify, maxBucket); } private void executeTestCase(boolean reduced, Query query, List dataset, Consumer configure, - Consumer verify) throws IOException { + Consumer verify, + int maxBucket) throws IOException { try (Directory directory = newDirectory()) { try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { @@ -389,9 +446,9 @@ public class DateHistogramAggregatorTests extends AggregatorTestCase { InternalDateHistogram histogram; if (reduced) { - histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); + histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, maxBucket, fieldType); } else { - 
histogram = search(indexSearcher, query, aggregationBuilder, fieldType); + histogram = search(indexSearcher, query, aggregationBuilder, maxBucket, fieldType); } verify.accept(histogram); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java index 47fccbc83c4..9dd355d8ca0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -72,15 +72,14 @@ public class TermsAggregatorTests extends AggregatorTestCase { private boolean randomizeAggregatorImpl = true; - @Override protected A createAggregator(AggregationBuilder aggregationBuilder, - IndexSearcher indexSearcher, IndexSettings indexSettings, MappedFieldType... fieldTypes) throws IOException { + IndexSearcher indexSearcher, MappedFieldType... 
fieldTypes) throws IOException { try { if (randomizeAggregatorImpl) { TermsAggregatorFactory.COLLECT_SEGMENT_ORDS = randomBoolean(); TermsAggregatorFactory.REMAP_GLOBAL_ORDS = randomBoolean(); } - return super.createAggregator(aggregationBuilder, indexSearcher, indexSettings, fieldTypes); + return super.createAggregator(aggregationBuilder, indexSearcher, fieldTypes); } finally { TermsAggregatorFactory.COLLECT_SEGMENT_ORDS = null; TermsAggregatorFactory.REMAP_GLOBAL_ORDS = null; diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index faf1f65f34b..5861e768436 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -2915,4 +2915,32 @@ public class HighlighterSearchIT extends ESIntegTestCase { assertThat(field.getFragments()[0].string(), equalTo("brown")); } } + + public void testWithNormalizer() throws Exception { + Builder builder = Settings.builder() + .put(indexSettings()) + .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase"); + + assertAcked(prepareCreate("test").setSettings(builder.build()) + .addMapping("doc", "keyword", + "type=keyword,normalizer=my_normalizer")); + ensureGreen(); + + client().prepareIndex("test", "doc", "0") + .setSource("keyword", "Hello World") + .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) + .get(); + + for (String highlighterType : new String[] {"unified", "plain"}) { + SearchResponse searchResponse = client().prepareSearch() + .setQuery(matchQuery("keyword", "hello world")) + .highlighter(new HighlightBuilder() + .field(new Field("keyword").highlighterType(highlighterType))) + .get(); + assertHitCount(searchResponse, 1); + HighlightField field = 
searchResponse.getHits().getAt(0).getHighlightFields().get("keyword"); + assertThat(field.getFragments().length, equalTo(1)); + assertThat(field.getFragments()[0].string(), equalTo("Hello World")); + } + } } diff --git a/distribution/build.gradle b/distribution/build.gradle index b7fa48561da..3df81d24c6b 100644 --- a/distribution/build.gradle +++ b/distribution/build.gradle @@ -164,7 +164,7 @@ configure(distributions) { from project(':core').jar from project(':core').configurations.runtime // delay add tools using closures, since they have not yet been configured, so no jar task exists yet - from { project(':distribution:tools:java-version-checker').jar } + from { project(':distribution:tools:launchers').jar } from { project(':distribution:tools:plugin-cli').jar } } @@ -392,6 +392,7 @@ configure(distributions.findAll { ['deb', 'rpm'].contains(it.name) }) { } else if (project.name == 'deb') { requires('bash') } + requires('coreutils') into '/usr/share/elasticsearch' fileMode 0644 diff --git a/distribution/src/main/resources/bin/elasticsearch b/distribution/src/main/resources/bin/elasticsearch index 4064170807f..11efddf6e26 100755 --- a/distribution/src/main/resources/bin/elasticsearch +++ b/distribution/src/main/resources/bin/elasticsearch @@ -16,15 +16,8 @@ source "`dirname "$0"`"/elasticsearch-env -parse_jvm_options() { - if [ -f "$1" ]; then - echo "`grep "^-" "$1" | tr '\n' ' '`" - fi -} - ES_JVM_OPTIONS="$ES_PATH_CONF"/jvm.options - -JVM_OPTIONS=`parse_jvm_options "$ES_JVM_OPTIONS"` +JVM_OPTIONS=`"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.JvmOptionsParser "$ES_JVM_OPTIONS"` ES_JAVA_OPTS="${JVM_OPTIONS//\$\{ES_TMPDIR\}/$ES_TMPDIR} $ES_JAVA_OPTS" cd "$ES_HOME" diff --git a/distribution/src/main/resources/bin/elasticsearch-env b/distribution/src/main/resources/bin/elasticsearch-env index 2b376bd47b3..83380400173 100644 --- a/distribution/src/main/resources/bin/elasticsearch-env +++ b/distribution/src/main/resources/bin/elasticsearch-env @@ -63,7 
+63,7 @@ if [ ! -z "$JAVA_OPTS" ]; then fi # check the Java version -"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.JavaVersionChecker +"$JAVA" -cp "$ES_CLASSPATH" org.elasticsearch.tools.launchers.JavaVersionChecker export HOSTNAME=$HOSTNAME @@ -75,9 +75,13 @@ if [ -z "$ES_PATH_CONF" ]; then fi if [ -z "$ES_TMPDIR" ]; then - if [ "`uname`" == "Darwin" ]; then - ES_TMPDIR=`mktemp -d -t elasticsearch` + set +e + mktemp --version 2>&1 | grep coreutils > /dev/null + mktemp_coreutils=$? + set -e + if [ $mktemp_coreutils -eq 0 ]; then + ES_TMPDIR=`mktemp -d --tmpdir "elasticearch.XXXXXXXX"` else - ES_TMPDIR=`mktemp -d -t elasticsearch.XXXXXXXX` + ES_TMPDIR=`mktemp -d -t elasticsearch` fi fi diff --git a/distribution/src/main/resources/bin/elasticsearch-env.bat b/distribution/src/main/resources/bin/elasticsearch-env.bat index e80a8ce258e..4d1ea24b388 100644 --- a/distribution/src/main/resources/bin/elasticsearch-env.bat +++ b/distribution/src/main/resources/bin/elasticsearch-env.bat @@ -42,7 +42,7 @@ if defined JAVA_OPTS ( ) rem check the Java version -%JAVA% -cp "%ES_CLASSPATH%" "org.elasticsearch.tools.JavaVersionChecker" || exit /b 1 +%JAVA% -cp "%ES_CLASSPATH%" "org.elasticsearch.tools.launchers.JavaVersionChecker" || exit /b 1 set HOSTNAME=%COMPUTERNAME% diff --git a/distribution/src/main/resources/bin/elasticsearch-service.bat b/distribution/src/main/resources/bin/elasticsearch-service.bat index e8be9485349..6218d120627 100644 --- a/distribution/src/main/resources/bin/elasticsearch-service.bat +++ b/distribution/src/main/resources/bin/elasticsearch-service.bat @@ -103,11 +103,19 @@ set ES_JVM_OPTIONS=%ES_PATH_CONF%\jvm.options if not "%ES_JAVA_OPTS%" == "" set ES_JAVA_OPTS=%ES_JAVA_OPTS: =;% @setlocal -for /F "usebackq delims=" %%a in (`findstr /b \- "%ES_JVM_OPTIONS%" ^| findstr /b /v "\-server \-client"`) do set JVM_OPTIONS=!JVM_OPTIONS!%%a; -@endlocal & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!%%ES_JAVA_OPTS% +for /F "usebackq delims=" %%a in 
(`"%JAVA% -cp "%ES_CLASSPATH%" "org.elasticsearch.tools.launchers.JvmOptionsParser" "%ES_JVM_OPTIONS%" || echo jvm_options_parser_failed"`) do set JVM_OPTIONS=%%a +@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS% + +if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" ( + exit /b 1 +) + +if not "%ES_JAVA_OPTS%" == "" set ES_JAVA_OPTS=%ES_JAVA_OPTS: =;% if "%ES_JAVA_OPTS:~-1%"==";" set ES_JAVA_OPTS=%ES_JAVA_OPTS:~0,-1% +echo %ES_JAVA_OPTS% + @setlocal EnableDelayedExpansion for %%a in ("%ES_JAVA_OPTS:;=","%") do ( set var=%%a diff --git a/distribution/src/main/resources/bin/elasticsearch.bat b/distribution/src/main/resources/bin/elasticsearch.bat index 210da3e5eb6..4709942d0dc 100644 --- a/distribution/src/main/resources/bin/elasticsearch.bat +++ b/distribution/src/main/resources/bin/elasticsearch.bat @@ -42,12 +42,13 @@ IF ERRORLEVEL 1 ( ) set "ES_JVM_OPTIONS=%ES_PATH_CONF%\jvm.options" - @setlocal -rem extract the options from the JVM options file %ES_JVM_OPTIONS% -rem such options are the lines beginning with '-', thus "findstr /b" -for /F "usebackq delims=" %%a in (`findstr /b \- "%ES_JVM_OPTIONS%"`) do set JVM_OPTIONS=!JVM_OPTIONS! %%a -@endlocal & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS% +for /F "usebackq delims=" %%a in (`"%JAVA% -cp "%ES_CLASSPATH%" "org.elasticsearch.tools.launchers.JvmOptionsParser" "%ES_JVM_OPTIONS%" || echo jvm_options_parser_failed"`) do set JVM_OPTIONS=%%a +@endlocal & set "MAYBE_JVM_OPTIONS_PARSER_FAILED=%JVM_OPTIONS%" & set ES_JAVA_OPTS=%JVM_OPTIONS:${ES_TMPDIR}=!ES_TMPDIR!% %ES_JAVA_OPTS% + +if "%MAYBE_JVM_OPTIONS_PARSER_FAILED%" == "jvm_options_parser_failed" ( + exit /b 1 +) cd "%ES_HOME%" %JAVA% %ES_JAVA_OPTS% -Delasticsearch -Des.path.home="%ES_HOME%" -Des.path.conf="%ES_PATH_CONF%" -cp "%ES_CLASSPATH%" "org.elasticsearch.bootstrap.Elasticsearch" !newparams! 
diff --git a/distribution/src/main/resources/config/jvm.options b/distribution/src/main/resources/config/jvm.options index a8fff81f468..85209c21ca4 100644 --- a/distribution/src/main/resources/config/jvm.options +++ b/distribution/src/main/resources/config/jvm.options @@ -44,9 +44,6 @@ ## basic -# force the server VM --server - # explicitly set the stack size -Xss1m @@ -84,13 +81,16 @@ # ensure the directory exists and has sufficient space ${heap.dump.path} -## GC logging +## JDK 8 GC logging --XX:+PrintGCDetails --XX:+PrintGCDateStamps --XX:+PrintTenuringDistribution --XX:+PrintGCApplicationStoppedTime --Xloggc:${loggc} --XX:+UseGCLogFileRotation --XX:NumberOfGCLogFiles=32 --XX:GCLogFileSize=64m +8:-XX:+PrintGCDetails +8:-XX:+PrintGCDateStamps +8:-XX:+PrintTenuringDistribution +8:-XX:+PrintGCApplicationStoppedTime +8:-Xloggc:${loggc} +8:-XX:+UseGCLogFileRotation +8:-XX:NumberOfGCLogFiles=32 +8:-XX:GCLogFileSize=64m + +# JDK 9+ GC logging +9-:-Xlog:gc*,gc+age=trace,safepoint:file=${loggc}:utctime,pid,tags:filecount=32,filesize=64m diff --git a/distribution/tools/java-version-checker/build.gradle b/distribution/tools/launchers/build.gradle similarity index 61% rename from distribution/tools/java-version-checker/build.gradle rename to distribution/tools/launchers/build.gradle index 7b2a76037cc..27e8712ffcb 100644 --- a/distribution/tools/java-version-checker/build.gradle +++ b/distribution/tools/launchers/build.gradle @@ -17,26 +17,38 @@ * under the License. 
*/ - import org.elasticsearch.gradle.precommit.PrecommitTasks import org.gradle.api.JavaVersion apply plugin: 'elasticsearch.build' apply plugin: 'ru.vyarus.animalsniffer' -sourceCompatibility = JavaVersion.VERSION_1_6 -targetCompatibility = JavaVersion.VERSION_1_6 +sourceCompatibility = JavaVersion.VERSION_1_7 +targetCompatibility = JavaVersion.VERSION_1_7 dependencies { - signature "org.codehaus.mojo.signature:java16:1.0@signature" + signature "org.codehaus.mojo.signature:java17:1.0@signature" + + testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" + testCompile "junit:junit:${versions.junit}" + testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" } +archivesBaseName = 'elasticsearch-launchers' + +// launchers do not depend on core so only JDK signatures should be checked forbiddenApisMain { - // java-version-checker does not depend on core so only JDK signatures should be checked + signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] +} +forbiddenApisTest { signaturesURLs = [PrecommitTasks.getResource('/forbidden/jdk-signatures.txt')] } +namingConventions { + testClass = 'org.elasticsearch.tools.launchers.LaunchersTestCase' + skipIntegTestInDisguise = true +} + javadoc.enabled = false -test.enabled = false loggerUsageCheck.enabled = false jarHell.enabled=false diff --git a/distribution/tools/java-version-checker/src/main/java/org/elasticsearch/tools/JavaVersionChecker.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JavaVersion.java similarity index 50% rename from distribution/tools/java-version-checker/src/main/java/org/elasticsearch/tools/JavaVersionChecker.java rename to distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JavaVersion.java index 1b44132c55e..30ca7a4a2a7 100644 --- a/distribution/tools/java-version-checker/src/main/java/org/elasticsearch/tools/JavaVersionChecker.java +++ 
b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JavaVersion.java @@ -17,47 +17,18 @@ * under the License. */ -package org.elasticsearch.tools; +package org.elasticsearch.tools.launchers; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; -import java.util.Locale; +import java.util.Objects; -/** - * Simple program that checks if the runtime Java version is at least 1.8. - */ -final class JavaVersionChecker { +public class JavaVersion { - private JavaVersionChecker() { - } + static final List CURRENT = parse(System.getProperty("java.specification.version")); + static final List JAVA_8 = parse("1.8"); - private static final List JAVA_8 = Arrays.asList(1, 8); - - /** - * The main entry point. The exit code is 0 if the Java version is at least 1.8, otherwise the exit code is 1. - * - * @param args the args to the program which are rejected if not empty - */ - public static void main(final String[] args) { - // no leniency! - if (args.length != 0) { - throw new IllegalArgumentException("expected zero arguments but was: " + Arrays.toString(args)); - } - final String javaSpecificationVersion = System.getProperty("java.specification.version"); - final List current = parse(javaSpecificationVersion); - if (compare(current, JAVA_8) < 0) { - final String message = String.format( - Locale.ROOT, - "the minimum required Java version is 8; your Java version from [%s] does not meet this requirement", - System.getProperty("java.home")); - println(message); - exit(1); - } - exit(0); - } - - private static List parse(final String value) { + static List parse(final String value) { if (!value.matches("^0*[0-9]+(\\.[0-9]+)*$")) { throw new IllegalArgumentException(value); } @@ -70,7 +41,16 @@ final class JavaVersionChecker { return version; } - private static int compare(final List left, final List right) { + static int majorVersion(final List javaVersion) { + Objects.requireNonNull(javaVersion); + if (javaVersion.get(0) > 1) { + 
return javaVersion.get(0); + } else { + return javaVersion.get(1); + } + } + + static int compare(final List left, final List right) { // lexicographically compare two lists, treating missing entries as zeros final int len = Math.max(left.size(), right.size()); for (int i = 0; i < len; i++) { @@ -86,14 +66,5 @@ final class JavaVersionChecker { return 0; } - @SuppressForbidden(reason = "System#err") - private static void println(String message) { - System.err.println(message); - } - - @SuppressForbidden(reason = "System#exit") - private static void exit(final int status) { - System.exit(status); - } } diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JavaVersionChecker.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JavaVersionChecker.java new file mode 100644 index 00000000000..ed632d060a5 --- /dev/null +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JavaVersionChecker.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.tools.launchers; + +import java.util.Arrays; +import java.util.Locale; + +/** + * Simple program that checks if the runtime Java version is at least 1.8. + */ +final class JavaVersionChecker { + + private JavaVersionChecker() { + } + + /** + * The main entry point. The exit code is 0 if the Java version is at least 1.8, otherwise the exit code is 1. + * + * @param args the args to the program which are rejected if not empty + */ + public static void main(final String[] args) { + // no leniency! + if (args.length != 0) { + throw new IllegalArgumentException("expected zero arguments but was " + Arrays.toString(args)); + } + if (JavaVersion.compare(JavaVersion.CURRENT, JavaVersion.JAVA_8) < 0) { + final String message = String.format( + Locale.ROOT, + "the minimum required Java version is 8; your Java version from [%s] does not meet this requirement", + System.getProperty("java.home")); + Launchers.errPrintln(message); + Launchers.exit(1); + } + Launchers.exit(0); + } + +} diff --git a/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java new file mode 100644 index 00000000000..fe7e045e6bc --- /dev/null +++ b/distribution/tools/launchers/src/main/java/org/elasticsearch/tools/launchers/JvmOptionsParser.java @@ -0,0 +1,270 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.tools.launchers; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.nio.charset.Charset; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.SortedMap; +import java.util.TreeMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +/** + * Parses JVM options from a file and prints a single line with all JVM options to standard output. + */ +final class JvmOptionsParser { + + /** + * The main entry point. The exit code is 0 if the JVM options were successfully parsed, otherwise the exit code is 1. If an improperly + * formatted line is discovered, the line is output to standard error. 
+ * + * @param args the args to the program which should consist of a single option, the path to the JVM options + */ + public static void main(final String[] args) throws IOException { + if (args.length != 1) { + throw new IllegalArgumentException("expected one argument specifying path to jvm.options but was " + Arrays.toString(args)); + } + final List<String> jvmOptions = new ArrayList<>(); + final SortedMap<Integer, String> invalidLines = new TreeMap<>(); + try (InputStream is = Files.newInputStream(Paths.get(args[0])); + Reader reader = new InputStreamReader(is, Charset.forName("UTF-8")); + BufferedReader br = new BufferedReader(reader)) { + parse( + JavaVersion.majorVersion(JavaVersion.CURRENT), + br, + new JvmOptionConsumer() { + @Override + public void accept(final String jvmOption) { + jvmOptions.add(jvmOption); + } + }, + new InvalidLineConsumer() { + @Override + public void accept(final int lineNumber, final String line) { + invalidLines.put(lineNumber, line); + } + }); + } + + if (invalidLines.isEmpty()) { + final String spaceDelimitedJvmOptions = spaceDelimitJvmOptions(jvmOptions); + Launchers.outPrintln(spaceDelimitedJvmOptions); + Launchers.exit(0); + } else { + final String errorMessage = String.format( + Locale.ROOT, + "encountered [%d] error%s parsing [%s]", + invalidLines.size(), + invalidLines.size() == 1 ? "" : "s", + args[0]); + Launchers.errPrintln(errorMessage); + int count = 0; + for (final Map.Entry<Integer, String> entry : invalidLines.entrySet()) { + count++; + final String message = String.format( + Locale.ROOT, + "[%d]: encountered improperly formatted JVM option line [%s] on line number [%d]", + count, + entry.getValue(), + entry.getKey()); + Launchers.errPrintln(message); + } + Launchers.exit(1); + } + } + + /** + * Callback for valid JVM options. + */ + interface JvmOptionConsumer { + /** + * Invoked when a line in the JVM options file matches the specified syntax and the specified major version. 
+ * @param jvmOption the matching JVM option + */ + void accept(String jvmOption); + } + + /** + * Callback for invalid lines in the JVM options. + */ + interface InvalidLineConsumer { + /** + * Invoked when a line in the JVM options does not match the specified syntax. + */ + void accept(int lineNumber, String line); + } + + private static final Pattern PATTERN = Pattern.compile("((?\\d+)(?-)?(?\\d+)?:)?(?