From 71c223c38326ed57776ee08003ac485ede746b1a Mon Sep 17 00:00:00 2001 From: Gameldar Date: Wed, 21 Dec 2016 15:15:19 +0800 Subject: [PATCH 01/21] Add link to foreach processor to ingest-attachment.asciidoc Add a Note to the foreach processor for processing an array of attachments, as this is required to be able to process an array of attachments. --- docs/plugins/ingest-attachment.asciidoc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/plugins/ingest-attachment.asciidoc b/docs/plugins/ingest-attachment.asciidoc index ff29e544c96..44ff41a6dfb 100644 --- a/docs/plugins/ingest-attachment.asciidoc +++ b/docs/plugins/ingest-attachment.asciidoc @@ -105,3 +105,5 @@ Returns this: NOTE: Extracting contents from binary data is a resource intensive operation and consumes a lot of resources. It is highly recommended to run pipelines using this processor in a dedicated ingest node. + +NOTE: To process an array of attachments the {ref}/foreach-processor.html[foreach processor] is required. From 417746ca9eb3fb8827acbe8c8342ded4171fb1e5 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 20 Dec 2016 10:49:26 +0100 Subject: [PATCH 02/21] Added base class for testing aggregators and some initial tests for `terms`, `top_hits` and `min` aggregations. --- .../aggregations/AggregatorTestCase.java | 117 +++++++++++++ .../bucket/terms/TermsAggregatorTests.java | 85 +++++++++ .../metrics/min/MinAggregatorTests.java | 163 ++++++++++++++++++ .../tophits/TopHitsAggregatorTests.java | 87 ++++++++++ 4 files changed, 452 insertions(+) create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorTests.java diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java new file mode 100644 index 00000000000..0d11ef5449c --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -0,0 +1,117 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
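For the array-of-attachments case that the note in this first patch points at, a minimal sketch of such a pipeline body may help. It is only an illustration: the field names "attachments" and "data" are assumptions, not anything this patch defines, and the body is assembled with XContentFactory.jsonBuilder() simply to keep the sketch in Java.

[source,java]
--------------------------------------------------
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

import java.io.IOException;

final class AttachmentsPipelineSketch {
    // One foreach processor wrapping the attachment processor: foreach walks the
    // "attachments" array and exposes each element to the wrapped processor as
    // _ingest._value, which is where the attachment processor reads and writes.
    static XContentBuilder pipelineBody() throws IOException {
        return XContentFactory.jsonBuilder()
            .startObject()
                .startArray("processors")
                    .startObject()
                        .startObject("foreach")
                            .field("field", "attachments")
                            .startObject("processor")
                                .startObject("attachment")
                                    .field("field", "_ingest._value.data")
                                    .field("target_field", "_ingest._value.attachment")
                                .endObject()
                            .endObject()
                        .endObject()
                    .endObject()
                .endArray()
            .endObject();
    }
}
--------------------------------------------------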
+ */ +package org.elasticsearch.search.aggregations; + +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryCache; +import org.apache.lucene.search.QueryCachingPolicy; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.ParseFieldMatcher; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.cache.query.DisabledQueryCache; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.fielddata.IndexFieldData; +import org.elasticsearch.index.fielddata.IndexFieldDataService; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.search.fetch.FetchPhase; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsFetchSubPhase; +import org.elasticsearch.search.fetch.subphase.FetchSourceSubPhase; +import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.Arrays; + +import static org.mockito.Matchers.any; +import static org.mockito.Matchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +/** + * Base class for testing {@link Aggregator} implementations. + * Provides a helper constructing the {@link Aggregator} implementation based on a provided {@link AggregationBuilder} instance. 
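+ * <p>
+ * Typical flow for a concrete test (see the terms, min and top_hits tests added in this commit):
+ * index a few documents with a {@code RandomIndexWriter}, build the {@code AggregationBuilder}
+ * and {@code MappedFieldType} under test, obtain the aggregator via {@code createAggregator},
+ * then call {@code preCollection()}, pass the aggregator as the collector to
+ * {@code IndexSearcher#search}, call {@code postCollection()} and assert on the result of
+ * {@code buildAggregation(0L)}.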
+ */ +public abstract class AggregatorTestCase extends ESTestCase { + + protected A createAggregator(B aggregationBuilder, + MappedFieldType fieldType, + IndexSearcher indexSearcher) throws IOException { + IndexSettings indexSettings = new IndexSettings( + IndexMetaData.builder("_index").settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .creationDate(System.currentTimeMillis()) + .build(), + Settings.EMPTY + ); + + Engine.Searcher searcher = new Engine.Searcher("aggregator_test", indexSearcher); + QueryCache queryCache = new DisabledQueryCache(indexSettings); + QueryCachingPolicy queryCachingPolicy = new QueryCachingPolicy() { + @Override + public void onUse(Query query) { + } + + @Override + public boolean shouldCache(Query query) throws IOException { + // never cache a query + return false; + } + }; + ContextIndexSearcher contextIndexSearcher = new ContextIndexSearcher(searcher, queryCache, queryCachingPolicy); + + CircuitBreakerService circuitBreakerService = new NoneCircuitBreakerService(); + SearchContext searchContext = mock(SearchContext.class); + when(searchContext.searcher()).thenReturn(contextIndexSearcher); + when(searchContext.parseFieldMatcher()).thenReturn(ParseFieldMatcher.STRICT); + when(searchContext.bigArrays()).thenReturn(new MockBigArrays(Settings.EMPTY, circuitBreakerService)); + when(searchContext.fetchPhase()) + .thenReturn(new FetchPhase(Arrays.asList(new FetchSourceSubPhase(), new DocValueFieldsFetchSubPhase()))); + + // TODO: now just needed for top_hits, this will need to be revised for other agg unit tests: + MapperService mapperService = mock(MapperService.class); + when(mapperService.hasNested()).thenReturn(false); + when(searchContext.mapperService()).thenReturn(mapperService); + + SearchLookup searchLookup = new SearchLookup(mapperService, mock(IndexFieldDataService.class), new String[]{"type"}); + when(searchContext.lookup()).thenReturn(searchLookup); + + QueryShardContext queryShardContext = mock(QueryShardContext.class); + IndexFieldData fieldData = fieldType.fielddataBuilder().build(indexSettings, fieldType, null, circuitBreakerService, + mock(MapperService.class)); + when(queryShardContext.fieldMapper(anyString())).thenReturn(fieldType); + when(queryShardContext.getForField(any())).thenReturn(fieldData); + when(searchContext.getQueryShardContext()).thenReturn(queryShardContext); + + @SuppressWarnings("unchecked") + A aggregator = (A) aggregationBuilder.build(searchContext, null).create(null, true); + return aggregator; + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java new file mode 100644 index 00000000000..60613e26d72 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/terms/TermsAggregatorTests.java @@ -0,0 +1,85 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
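The helper above is not specific to the three aggregations covered by this commit. As a sketch of how the same harness would extend to another metric aggregation, a max test could mirror the min tests below; MaxAggregationBuilder, MaxAggregator and InternalMax are assumed here to follow the same pattern as their min counterparts and are not touched by this patch.

[source,java]
--------------------------------------------------
package org.elasticsearch.search.aggregations.metrics.max;

import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.RandomIndexWriter;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.search.aggregations.AggregatorTestCase;

public class MaxAggregatorTests extends AggregatorTestCase {

    public void testMaxAggregator() throws Exception {
        Directory directory = newDirectory();
        RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory);
        // index four single-valued numeric docs; the expected max is 9
        for (long value : new long[] {3, 1, 9, 7}) {
            Document document = new Document();
            document.add(new NumericDocValuesField("number", value));
            indexWriter.addDocument(document);
        }
        indexWriter.close();

        IndexReader indexReader = DirectoryReader.open(directory);
        IndexSearcher indexSearcher = newSearcher(indexReader, true, true);

        MaxAggregationBuilder aggregationBuilder = new MaxAggregationBuilder("_name").field("number");
        MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG);
        fieldType.setName("number");
        try (MaxAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) {
            aggregator.preCollection();
            indexSearcher.search(new MatchAllDocsQuery(), aggregator);
            aggregator.postCollection();
            InternalMax result = (InternalMax) aggregator.buildAggregation(0L);
            assertEquals(9.0, result.getValue(), 0);
        }
        indexReader.close();
        directory.close();
    }
}
--------------------------------------------------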
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.bucket.terms; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.support.ValueType; + +public class TermsAggregatorTests extends AggregatorTestCase { + + public void testTermsAggregator() throws Exception { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + Document document = new Document(); + document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("string", new BytesRef("c"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("d"))); + indexWriter.addDocument(document); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + for (TermsAggregatorFactory.ExecutionMode executionMode : TermsAggregatorFactory.ExecutionMode.values()) { + TermsAggregationBuilder aggregationBuilder = new TermsAggregationBuilder("_name", ValueType.STRING) + .executionHint(executionMode.toString()) + .field("string") + .order(Terms.Order.term(true)); + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(); + fieldType.setName("string"); + fieldType.setHasDocValues(true ); + try (TermsAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) { + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + Terms result = (Terms) aggregator.buildAggregation(0L); + assertEquals(4, result.getBuckets().size()); + assertEquals("a", result.getBuckets().get(0).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(0).getDocCount()); + assertEquals("b", result.getBuckets().get(1).getKeyAsString()); + assertEquals(2L, result.getBuckets().get(1).getDocCount()); + assertEquals("c", result.getBuckets().get(2).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(2).getDocCount()); + assertEquals("d", result.getBuckets().get(3).getKeyAsString()); + assertEquals(1L, result.getBuckets().get(3).getDocCount()); + } + } + indexReader.close(); + 
directory.close(); + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java new file mode 100644 index 00000000000..b9ee39f7275 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregatorTests.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.metrics.min; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.NumericDocValuesField; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.store.Directory; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.NumberFieldMapper; +import org.elasticsearch.search.aggregations.AggregatorTestCase; + +public class MinAggregatorTests extends AggregatorTestCase { + + public void testMinAggregator_numericDv() throws Exception { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + Document document = new Document(); + document.add(new NumericDocValuesField("number", 9)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("number", 7)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("number", 5)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("number", 3)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("number", 1)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new NumericDocValuesField("number", -1)); + indexWriter.addDocument(document); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number"); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType.setName("number"); + try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) { + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + InternalMin result = (InternalMin) 
aggregator.buildAggregation(0L); + assertEquals(-1.0, result.getValue(), 0); + } + indexReader.close(); + directory.close(); + } + + public void testMinAggregator_sortedNumericDv() throws Exception { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + Document document = new Document(); + document.add(new SortedNumericDocValuesField("number", 9)); + document.add(new SortedNumericDocValuesField("number", 7)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedNumericDocValuesField("number", 5)); + document.add(new SortedNumericDocValuesField("number", 3)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedNumericDocValuesField("number", 1)); + document.add(new SortedNumericDocValuesField("number", -1)); + indexWriter.addDocument(document); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number"); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType.setName("number"); + try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) { + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + InternalMin result = (InternalMin) aggregator.buildAggregation(0L); + assertEquals(-1.0, result.getValue(), 0); + } + indexReader.close(); + directory.close(); + } + + public void testMinAggregator_noValue() throws Exception { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + Document document = new Document(); + document.add(new SortedNumericDocValuesField("number1", 7)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedNumericDocValuesField("number1", 3)); + indexWriter.addDocument(document); + document = new Document(); + document.add(new SortedNumericDocValuesField("number1", 1)); + indexWriter.addDocument(document); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number2"); + MappedFieldType fieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType.setName("number2"); + try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) { + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + InternalMin result = (InternalMin) aggregator.buildAggregation(0L); + assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0); + } + indexReader.close(); + directory.close(); + } + + public void testMinAggregator_noDocs() throws Exception { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + MinAggregationBuilder aggregationBuilder = new MinAggregationBuilder("_name").field("number"); + MappedFieldType fieldType = new 
NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + fieldType.setName("number"); + try (MinAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)) { + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + InternalMin result = (InternalMin) aggregator.buildAggregation(0L); + assertEquals(Double.POSITIVE_INFINITY, result.getValue(), 0); + } + indexReader.close(); + directory.close(); + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorTests.java new file mode 100644 index 00000000000..9bf371e5099 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/tophits/TopHitsAggregatorTests.java @@ -0,0 +1,87 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.search.aggregations.metrics.tophits; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.SortedSetDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.store.Directory; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.mapper.KeywordFieldMapper; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.mapper.UidFieldMapper; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.sort.SortOrder; + +public class TopHitsAggregatorTests extends AggregatorTestCase { + + public void testTermsAggregator() throws Exception { + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + Document document = new Document(); + document.add(new Field(UidFieldMapper.NAME, Uid.createUid("type", "1"), UidFieldMapper.Defaults.FIELD_TYPE)); + document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); + indexWriter.addDocument(document); + document = new Document(); + document.add(new Field(UidFieldMapper.NAME, Uid.createUid("type", "2"), UidFieldMapper.Defaults.FIELD_TYPE)); + document.add(new SortedSetDocValuesField("string", new BytesRef("c"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("a"))); + indexWriter.addDocument(document); + document 
= new Document(); + document.add(new Field(UidFieldMapper.NAME, Uid.createUid("type", "3"), UidFieldMapper.Defaults.FIELD_TYPE)); + document.add(new SortedSetDocValuesField("string", new BytesRef("b"))); + document.add(new SortedSetDocValuesField("string", new BytesRef("d"))); + indexWriter.addDocument(document); + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + MappedFieldType fieldType = new KeywordFieldMapper.KeywordFieldType(); + fieldType.setName("string"); + fieldType.setHasDocValues(true ); + TopHitsAggregationBuilder aggregationBuilder = new TopHitsAggregationBuilder("_name"); + aggregationBuilder.sort("string", SortOrder.DESC); + try (TopHitsAggregator aggregator = createAggregator(aggregationBuilder, fieldType, indexSearcher)){ + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + TopHits topHits = (TopHits) aggregator.buildAggregation(0L); + SearchHits searchHits = topHits.getHits(); + assertEquals(3L, searchHits.getTotalHits()); + assertEquals("3", searchHits.getAt(0).getId()); + assertEquals("type", searchHits.getAt(0).getType()); + assertEquals("2", searchHits.getAt(1).getId()); + assertEquals("type", searchHits.getAt(1).getType()); + assertEquals("1", searchHits.getAt(2).getId()); + assertEquals("type", searchHits.getAt(2).getType()); + } + indexReader.close(); + directory.close(); + } + +} From e298180a39790a5aab49dac7735479d235124322 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 21 Dec 2016 10:11:56 +0100 Subject: [PATCH 03/21] IndicesStoreIntegrationIT should not use start recovery sending as an indication that the recovery started Sending a request is not a good indicator as it doesn't mean it's processed yet. Instead we should use one of the first request from source to target. 
This caused the cluster state block to be added to early , blocking the recovery it self --- .../store/IndicesStoreIntegrationIT.java | 18 +++++------------- 1 file changed, 5 insertions(+), 13 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java index 29845e5396d..174c6788bd9 100644 --- a/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java +++ b/core/src/test/java/org/elasticsearch/indices/store/IndicesStoreIntegrationIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RoutingNode; @@ -45,7 +44,7 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; -import org.elasticsearch.indices.recovery.PeerRecoverySourceService; +import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.node.Node; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -56,8 +55,6 @@ import org.elasticsearch.test.disruption.BlockClusterStateProcessing; import org.elasticsearch.test.disruption.SingleNodeDisruption; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.transport.ConnectTransportException; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; @@ -480,18 +477,13 @@ public class IndicesStoreIntegrationIT extends ESIntegTestCase { @Override public void receivedRequest(long requestId, String action) { - if (action.equals(IndicesStore.ACTION_SHARD_EXISTS)) { + if (action.equals(PeerRecoveryTargetService.Actions.FILES_INFO)) { + logger.info("received: {}, relocation starts", action); + beginRelocationLatch.countDown(); + } else if (action.equals(IndicesStore.ACTION_SHARD_EXISTS)) { receivedShardExistsRequestLatch.countDown(); logger.info("received: {}, relocation done", action); } } - - @Override - public void requestSent(DiscoveryNode node, long requestId, String action, TransportRequestOptions options) { - if (action.equals(PeerRecoverySourceService.Actions.START_RECOVERY)) { - logger.info("sent: {}, relocation starts", action); - beginRelocationLatch.countDown(); - } - } } } From 6249f1092fcf116dd88d76c3b533cab4e11c2afa Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 21 Dec 2016 10:25:33 +0100 Subject: [PATCH 04/21] x_refresh.yaml tests should use unique index names and doc ids to ease debugging This is to make it easier to grep the node logs --- .../rest-api-spec/test/bulk/50_refresh.yaml | 18 +++++++++--------- .../rest-api-spec/test/create/60_refresh.yaml | 8 ++++---- .../rest-api-spec/test/delete/50_refresh.yaml | 16 ++++++++-------- .../rest-api-spec/test/index/60_refresh.yaml | 8 ++++---- .../rest-api-spec/test/update/60_refresh.yaml | 14 +++++++------- 5 files changed, 32 
insertions(+), 32 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yaml index 4906975bfab..6326b9464ca 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/50_refresh.yaml @@ -4,14 +4,14 @@ bulk: refresh: true body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id1"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} + {"index": {"_index": "bulk_50_refresh_1", "_type": "test_type", "_id": "bulk_50_refresh_id2"}} {"f1": "v2", "f2": 47} - do: count: - index: test_index + index: bulk_50_refresh_1 - match: {count: 2} --- @@ -20,14 +20,14 @@ bulk: refresh: "" body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id3"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} + {"index": {"_index": "bulk_50_refresh_2", "_type": "test_type", "_id": "bulk_50_refresh_id4"}} {"f1": "v2", "f2": 47} - do: count: - index: test_index + index: bulk_50_refresh_2 - match: {count: 2} @@ -37,12 +37,12 @@ bulk: refresh: wait_for body: | - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id"}} + {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id5"}} {"f1": "v1", "f2": 42} - {"index": {"_index": "test_index", "_type": "test_type", "_id": "test_id2"}} + {"index": {"_index": "bulk_50_refresh_3", "_type": "test_type", "_id": "bulk_50_refresh_id6"}} {"f1": "v2", "f2": 47} - do: count: - index: test_index + index: bulk_50_refresh_3 - match: {count: 2} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml index 5cdc1a20583..deb5e1e6850 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/create/60_refresh.yaml @@ -66,17 +66,17 @@ "refresh=wait_for waits until changes are visible in search": - do: index: - index: test_1 + index: create_60_refresh_1 type: test - id: 1 + id: create_60_refresh_id1 body: { foo: bar } refresh: wait_for - is_false: forced_refresh - do: search: - index: test_1 + index: create_60_refresh_1 type: test body: - query: { term: { _id: 1 }} + query: { term: { _id: create_60_refresh_id1 }} - match: { hits.total: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yaml index 9ea6bc033de..589d2cf77a3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/delete/50_refresh.yaml @@ -115,33 +115,33 @@ "refresh=wait_for waits until changes are visible in search": - do: index: - index: test_1 + index: delete_50_refresh_1 type: test - id: 1 + id: delete_50_refresh_id1 body: { foo: bar } refresh: true - is_true: forced_refresh - do: search: - index: test_1 + index: delete_50_refresh_1 type: test body: - query: { term: { _id: 1 }} + query: { term: { _id: delete_50_refresh_id1 }} - match: { hits.total: 
1 } - do: delete: - index: test_1 + index: delete_50_refresh_1 type: test - id: 1 + id: delete_50_refresh_id1 refresh: wait_for - is_false: forced_refresh - do: search: - index: test_1 + index: delete_50_refresh_1 type: test body: - query: { term: { _id: 1 }} + query: { term: { _id: delete_50_refresh_id1 }} - match: { hits.total: 0 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml index 2175128debb..5d20406abee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/index/60_refresh.yaml @@ -67,17 +67,17 @@ "refresh=wait_for waits until changes are visible in search": - do: index: - index: test_1 + index: index_60_refresh_1 type: test - id: 1 + id: index_60_refresh_id1 body: { foo: bar } refresh: wait_for - is_false: forced_refresh - do: search: - index: test_1 + index: index_60_refresh_1 type: test body: - query: { term: { _id: 1 }} + query: { term: { _id: index_60_refresh_id1 }} - match: { hits.total: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml index c5fe68e26c9..62d8fd125ff 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/update/60_refresh.yaml @@ -81,26 +81,26 @@ "refresh=wait_for waits until changes are visible in search": - do: index: - index: test_1 + index: update_60_refresh_1 type: test - id: 1 + id: update_60_refresh_id1 body: { foo: bar } refresh: true - is_true: forced_refresh - do: search: - index: test_1 + index: update_60_refresh_1 type: test body: - query: { term: { _id: 1 }} + query: { term: { _id: update_60_refresh_id1 }} - match: { hits.total: 1 } - do: update: - index: test_1 + index: update_60_refresh_1 type: test - id: 1 + id: update_60_refresh_id1 refresh: wait_for body: doc: { test: asdf } @@ -108,7 +108,7 @@ - do: search: - index: test_1 + index: update_60_refresh_1 type: test body: query: { match: { test: asdf } } From 9b22ec1c6ab2ee1bcee1f2a26071b49f239c8c67 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Wed, 21 Dec 2016 10:54:27 +0100 Subject: [PATCH 05/21] Add ID for percolate query to Java API docs --- docs/java-api/query-dsl/percolate-query.asciidoc | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/docs/java-api/query-dsl/percolate-query.asciidoc b/docs/java-api/query-dsl/percolate-query.asciidoc index 1d1ae314fcb..a5651392b62 100644 --- a/docs/java-api/query-dsl/percolate-query.asciidoc +++ b/docs/java-api/query-dsl/percolate-query.asciidoc @@ -1,3 +1,10 @@ +[[java-query-percolate-query]] +==== Percolate Query + +See: + * {ref}/query-dsl-percolate-query.html[Percolate Query] + + [source,java] -------------------------------------------------- Settings settings = Settings.builder().put("cluster.name", "elasticsearch").build(); From bdecbb529f3d4fd7700be6dc686c4c80dda29e94 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 21 Dec 2016 11:19:47 +0100 Subject: [PATCH 06/21] Factor out sort values from InternalSearchHit (#22080) This adds fromXContent method and unit test for sort values that are part of InternalSearchHit. 
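To make the shape of the change concrete before the diff: the sort values of a hit render as a "sort" array on the hit, and the class extracted by this commit owns both that rendering and the new parsing path. A condensed sketch of the round trip the added unit tests exercise, written as it would appear inside an ESTestCase test method:

[source,java]
--------------------------------------------------
// DocValueFormat.RAW is enough here because none of these values are BytesRef-backed.
SearchSortValues sortValues = new SearchSortValues(
        new Object[] { 1, "foo", 3.0 },
        new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW, DocValueFormat.RAW });

XContentBuilder builder = JsonXContent.contentBuilder();
builder.startObject();
sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS);   // renders {"sort":[1,"foo",3.0]}
builder.endObject();

XContentParser parser = createParser(builder);   // ESTestCase helper, as in the new tests
parser.nextToken();                               // START_OBJECT
parser.nextToken();                               // FIELD_NAME "sort", where fromXContent expects to start
SearchSortValues parsed = SearchSortValues.fromXContent(parser);
// holds for these particular values; mixed numeric types would not survive a JSON round trip unchanged
assertEquals(sortValues, parsed);
--------------------------------------------------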
In order to centralize serialisation and xContent parsing and rendering code, move all relevant parts to a new class which can be unit tested much better in isolation.This is part of the preparation for parsing search responses on the client side. --- .../common/xcontent/XContentParserUtils.java | 14 ++ .../search/internal/InternalSearchHit.java | 93 +--------- .../search/internal/SearchSortValues.java | 168 ++++++++++++++++++ .../internal/SearchSortValuesTests.java | 129 ++++++++++++++ 4 files changed, 317 insertions(+), 87 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/search/internal/SearchSortValues.java create mode 100644 core/src/test/java/org/elasticsearch/search/internal/SearchSortValuesTests.java diff --git a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java index 846582fa5f4..956a0d85de8 100644 --- a/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java +++ b/core/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java @@ -22,6 +22,7 @@ package org.elasticsearch.common.xcontent; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.xcontent.XContentParser.Token; +import java.io.IOException; import java.util.Locale; import java.util.function.Supplier; @@ -34,6 +35,19 @@ public final class XContentParserUtils { private XContentParserUtils() { } + /** + * Makes sure that current token is of type {@link XContentParser.Token#FIELD_NAME} and the the field name is equal to the provided one + * @throws ParsingException if the token is not of type {@link XContentParser.Token#FIELD_NAME} or is not equal to the given field name + */ + public static void ensureFieldName(XContentParser parser, Token token, String fieldName) throws IOException { + ensureExpectedToken(Token.FIELD_NAME, token, parser::getTokenLocation); + String currentName = parser.currentName(); + if (currentName.equals(fieldName) == false) { + String message = "Failed to parse object: expecting field with name [%s] but found [%s]"; + throw new ParsingException(parser.getTokenLocation(), String.format(Locale.ROOT, message, fieldName, currentName)); + } + } + /** * @throws ParsingException with a "unknown field found" reason */ diff --git a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java index 82324e3edf7..76de9740c8e 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java +++ b/core/src/main/java/org/elasticsearch/search/internal/InternalSearchHit.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.internal; import org.apache.lucene.search.Explanation; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -48,7 +47,6 @@ import org.elasticsearch.search.lookup.SourceLookup; import java.io.IOException; import java.util.ArrayList; -import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -67,8 +65,6 @@ import static org.elasticsearch.search.internal.InternalSearchHitField.readSearc public class InternalSearchHit implements SearchHit { - private static final Object[] EMPTY_SORT_VALUES = new Object[0]; - private transient int docId; private float score = Float.NEGATIVE_INFINITY; @@ -86,7 +82,7 @@ public class 
InternalSearchHit implements SearchHit { private Map highlightFields = null; - private Object[] sortValues = EMPTY_SORT_VALUES; + private SearchSortValues sortValues = SearchSortValues.EMPTY; private String[] matchedQueries = Strings.EMPTY_ARRAY; @@ -343,17 +339,12 @@ public class InternalSearchHit implements SearchHit { } public void sortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) { - this.sortValues = Arrays.copyOf(sortValues, sortValues.length); - for (int i = 0; i < sortValues.length; ++i) { - if (this.sortValues[i] instanceof BytesRef) { - this.sortValues[i] = sortValueFormats[i].format((BytesRef) sortValues[i]); - } - } + this.sortValues = new SearchSortValues(sortValues, sortValueFormats); } @Override public Object[] sortValues() { - return sortValues; + return sortValues.sortValues(); } @Override @@ -499,13 +490,7 @@ public class InternalSearchHit implements SearchHit { } builder.endObject(); } - if (sortValues != null && sortValues.length > 0) { - builder.startArray(Fields.SORT); - for (Object sortValue : sortValues) { - builder.value(sortValue); - } - builder.endArray(); - } + sortValues.toXContent(builder, params); if (matchedQueries.length > 0) { builder.startArray(Fields.MATCHED_QUERIES); for (String matchedFilter : matchedQueries) { @@ -603,34 +588,7 @@ public class InternalSearchHit implements SearchHit { this.highlightFields = unmodifiableMap(highlightFields); } - size = in.readVInt(); - if (size > 0) { - sortValues = new Object[size]; - for (int i = 0; i < sortValues.length; i++) { - byte type = in.readByte(); - if (type == 0) { - sortValues[i] = null; - } else if (type == 1) { - sortValues[i] = in.readString(); - } else if (type == 2) { - sortValues[i] = in.readInt(); - } else if (type == 3) { - sortValues[i] = in.readLong(); - } else if (type == 4) { - sortValues[i] = in.readFloat(); - } else if (type == 5) { - sortValues[i] = in.readDouble(); - } else if (type == 6) { - sortValues[i] = in.readByte(); - } else if (type == 7) { - sortValues[i] = in.readShort(); - } else if (type == 8) { - sortValues[i] = in.readBoolean(); - } else { - throw new IOException("Can't match type [" + type + "]"); - } - } - } + sortValues = new SearchSortValues(in); size = in.readVInt(); if (size > 0) { @@ -681,46 +639,7 @@ public class InternalSearchHit implements SearchHit { highlightField.writeTo(out); } } - - if (sortValues.length == 0) { - out.writeVInt(0); - } else { - out.writeVInt(sortValues.length); - for (Object sortValue : sortValues) { - if (sortValue == null) { - out.writeByte((byte) 0); - } else { - Class type = sortValue.getClass(); - if (type == String.class) { - out.writeByte((byte) 1); - out.writeString((String) sortValue); - } else if (type == Integer.class) { - out.writeByte((byte) 2); - out.writeInt((Integer) sortValue); - } else if (type == Long.class) { - out.writeByte((byte) 3); - out.writeLong((Long) sortValue); - } else if (type == Float.class) { - out.writeByte((byte) 4); - out.writeFloat((Float) sortValue); - } else if (type == Double.class) { - out.writeByte((byte) 5); - out.writeDouble((Double) sortValue); - } else if (type == Byte.class) { - out.writeByte((byte) 6); - out.writeByte((Byte) sortValue); - } else if (type == Short.class) { - out.writeByte((byte) 7); - out.writeShort((Short) sortValue); - } else if (type == Boolean.class) { - out.writeByte((byte) 8); - out.writeBoolean((Boolean) sortValue); - } else { - throw new IOException("Can't handle sort field value of type [" + type + "]"); - } - } - } - } + sortValues.writeTo(out); if 
(matchedQueries.length == 0) { out.writeVInt(0); diff --git a/core/src/main/java/org/elasticsearch/search/internal/SearchSortValues.java b/core/src/main/java/org/elasticsearch/search/internal/SearchSortValues.java new file mode 100644 index 00000000000..a4fcb18f828 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/internal/SearchSortValues.java @@ -0,0 +1,168 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.internal; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.internal.InternalSearchHit.Fields; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Objects; + +public class SearchSortValues implements ToXContent, Writeable { + + static final SearchSortValues EMPTY = new SearchSortValues(new Object[0]); + private final Object[] sortValues; + + SearchSortValues(Object[] sortValues) { + this.sortValues = Objects.requireNonNull(sortValues, "sort values must not be empty"); + } + + public SearchSortValues(Object[] sortValues, DocValueFormat[] sortValueFormats) { + Objects.requireNonNull(sortValues); + Objects.requireNonNull(sortValueFormats); + this.sortValues = Arrays.copyOf(sortValues, sortValues.length); + for (int i = 0; i < sortValues.length; ++i) { + if (this.sortValues[i] instanceof BytesRef) { + this.sortValues[i] = sortValueFormats[i].format((BytesRef) sortValues[i]); + } + } + } + + public SearchSortValues(StreamInput in) throws IOException { + int size = in.readVInt(); + if (size > 0) { + sortValues = new Object[size]; + for (int i = 0; i < sortValues.length; i++) { + byte type = in.readByte(); + if (type == 0) { + sortValues[i] = null; + } else if (type == 1) { + sortValues[i] = in.readString(); + } else if (type == 2) { + sortValues[i] = in.readInt(); + } else if (type == 3) { + sortValues[i] = in.readLong(); + } else if (type == 4) { + sortValues[i] = in.readFloat(); + } else if (type == 5) { + sortValues[i] = in.readDouble(); + } else if (type == 6) { + sortValues[i] = in.readByte(); + } else if (type == 7) { + sortValues[i] = in.readShort(); + } else if (type == 8) { + sortValues[i] = in.readBoolean(); + } else { + throw new IOException("Can't match type [" + type + "]"); + } + } + } else { + sortValues = new Object[0]; + } + } + + @Override + public void writeTo(StreamOutput out) throws 
IOException { + out.writeVInt(sortValues.length); + for (Object sortValue : sortValues) { + if (sortValue == null) { + out.writeByte((byte) 0); + } else { + Class type = sortValue.getClass(); + if (type == String.class) { + out.writeByte((byte) 1); + out.writeString((String) sortValue); + } else if (type == Integer.class) { + out.writeByte((byte) 2); + out.writeInt((Integer) sortValue); + } else if (type == Long.class) { + out.writeByte((byte) 3); + out.writeLong((Long) sortValue); + } else if (type == Float.class) { + out.writeByte((byte) 4); + out.writeFloat((Float) sortValue); + } else if (type == Double.class) { + out.writeByte((byte) 5); + out.writeDouble((Double) sortValue); + } else if (type == Byte.class) { + out.writeByte((byte) 6); + out.writeByte((Byte) sortValue); + } else if (type == Short.class) { + out.writeByte((byte) 7); + out.writeShort((Short) sortValue); + } else if (type == Boolean.class) { + out.writeByte((byte) 8); + out.writeBoolean((Boolean) sortValue); + } else { + throw new IOException("Can't handle sort field value of type [" + type + "]"); + } + } + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (sortValues.length > 0) { + builder.startArray(Fields.SORT); + for (Object sortValue : sortValues) { + builder.value(sortValue); + } + builder.endArray(); + } + return builder; + } + + public static SearchSortValues fromXContent(XContentParser parser) throws IOException { + XContentParserUtils.ensureFieldName(parser, parser.currentToken(), Fields.SORT); + XContentParser.Token token = parser.nextToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_ARRAY, token, parser::getTokenLocation); + return new SearchSortValues(parser.list().toArray()); + } + + public Object[] sortValues() { + return sortValues; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + SearchSortValues other = (SearchSortValues) obj; + return Arrays.equals(sortValues, other.sortValues); + } + + @Override + public int hashCode() { + return Arrays.hashCode(sortValues); + } +} diff --git a/core/src/test/java/org/elasticsearch/search/internal/SearchSortValuesTests.java b/core/src/test/java/org/elasticsearch/search/internal/SearchSortValuesTests.java new file mode 100644 index 00000000000..e67c14a9d7d --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/internal/SearchSortValuesTests.java @@ -0,0 +1,129 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.internal; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.function.Supplier; + +import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; + +public class SearchSortValuesTests extends ESTestCase { + + public static SearchSortValues createTestItem() { + List> valueSuppliers = new ArrayList<>(); + // this should reflect all values that are allowed to go through the transport layer + valueSuppliers.add(() -> null); + valueSuppliers.add(() -> randomInt()); + valueSuppliers.add(() -> randomLong()); + valueSuppliers.add(() -> randomDouble()); + valueSuppliers.add(() -> randomFloat()); + valueSuppliers.add(() -> randomByte()); + valueSuppliers.add(() -> randomShort()); + valueSuppliers.add(() -> randomBoolean()); + valueSuppliers.add(() -> frequently() ? randomAsciiOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30)); + + int size = randomInt(20); + Object[] values = new Object[size]; + for (int i = 0; i < size; i++) { + Supplier supplier = randomFrom(valueSuppliers); + values[i] = supplier.get(); + } + return new SearchSortValues(values); + } + + public void testFromXContent() throws IOException { + SearchSortValues sortValues = createTestItem(); + XContentType xcontentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentFactory.contentBuilder(xcontentType); + if (randomBoolean()) { + builder.prettyPrint(); + } + builder.startObject(); // we need to wrap xContent output in proper object to create a parser for it + builder = sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + + XContentParser parser = createParser(builder); + parser.nextToken(); // skip to the elements field name token, fromXContent advances from there if called from ourside + parser.nextToken(); + if (sortValues.sortValues().length > 0) { + SearchSortValues parsed = SearchSortValues.fromXContent(parser); + assertToXContentEquivalent(builder.bytes(), toXContent(parsed, xcontentType, true), xcontentType); + parser.nextToken(); + } + assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken()); + assertNull(parser.nextToken()); + } + + public void testToXContent() throws IOException { + SearchSortValues sortValues = new SearchSortValues(new Object[]{ 1, "foo", 3.0}); + XContentBuilder builder = JsonXContent.contentBuilder(); + builder.startObject(); + sortValues.toXContent(builder, ToXContent.EMPTY_PARAMS); + builder.endObject(); + assertEquals("{\"sort\":[1,\"foo\",3.0]}", builder.string()); + } + + /** + * Test equality and hashCode properties + */ + public void testEqualsAndHashcode() { + checkEqualsAndHashCode(createTestItem(), SearchSortValuesTests::copy, SearchSortValuesTests::mutate); + } + + public void testSerialization() 
throws IOException { + SearchSortValues sortValues = createTestItem(); + try (BytesStreamOutput output = new BytesStreamOutput()) { + sortValues.writeTo(output); + try (StreamInput in = output.bytes().streamInput()) { + SearchSortValues deserializedCopy = new SearchSortValues(in); + assertEquals(sortValues, deserializedCopy); + assertEquals(sortValues.hashCode(), deserializedCopy.hashCode()); + assertNotSame(sortValues, deserializedCopy); + } + } + } + + private static SearchSortValues mutate(SearchSortValues original) { + Object[] sortValues = original.sortValues(); + if (sortValues.length == 0) { + return new SearchSortValues(new Object[] { 1 }); + } + return new SearchSortValues(Arrays.copyOf(sortValues, sortValues.length + 1)); + } + + private static SearchSortValues copy(SearchSortValues original) { + return new SearchSortValues(Arrays.copyOf(original.sortValues(), original.sortValues().length)); + } +} From 2ec5deef089723885dc76462c68b953f3af364ca Mon Sep 17 00:00:00 2001 From: David Pilato Date: Wed, 21 Dec 2016 11:27:34 +0100 Subject: [PATCH 07/21] Link for setting page in elasticsearch.yml is outdated Closes #22299. --- distribution/src/main/resources/config/elasticsearch.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/distribution/src/main/resources/config/elasticsearch.yml b/distribution/src/main/resources/config/elasticsearch.yml index 3d732e74c0e..bf806535c9b 100644 --- a/distribution/src/main/resources/config/elasticsearch.yml +++ b/distribution/src/main/resources/config/elasticsearch.yml @@ -8,7 +8,7 @@ # the most important settings you may want to configure for a production cluster. # # Please see the documentation for further information on configuration options: -# +# # # ---------------------------------- Cluster ----------------------------------- # From ae01a51b44c1bc4f0bbcfc8904b00bf48f450c96 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Wed, 21 Dec 2016 11:44:24 +0100 Subject: [PATCH 08/21] [TEST] make ESSingleNodeTestCase tests repeatable (#22283) If we conditionally do random things, e.g. initialize a node only after the first test, we have to make sure that we unconditionally create a new seed calling random.nextLong(), then initialize the node under a private randomness context. This makes sure that any random usage through Randomness.get() will retrieve the proper random instance through RandomizedContext.current().getRandom(). When running under private randomness, the context will return the Random instance that was created with the provided seed (forked from the main random instance) rather than the main Random that's exposed to tests as well. Otherwise tests become non repeatable because that initialization part happens only before the first executed test. 
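Reduced to its essence, the pattern this message describes is the two lines below; this is a condensation of the ESSingleNodeTestCase change that follows, not additional API.

[source,java]
--------------------------------------------------
// Always draw a seed from the test's main Random, whether or not the node will be (re)built,
// so repeated runs of the same test consume the main random stream identically.
long seed = random().nextLong();
if (NODE == null) {
    // Build the node under a private randomness context forked from that seed, so that any
    // Randomness.get() call during node construction resolves to a reproducible Random.
    NODE = RandomizedContext.current().runWithPrivateRandomness(seed, this::newNode);
}
--------------------------------------------------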
--- .../resources/checkstyle_suppressions.xml | 4 - .../test/ESSingleNodeTestCase.java | 97 ++++++++----------- .../org/elasticsearch/test/ESTestCase.java | 3 +- 3 files changed, 45 insertions(+), 59 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index dba0529923f..02097315eaa 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -993,13 +993,9 @@ - - - - diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 32dac0fd837..449e959372c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -18,13 +18,7 @@ */ package org.elasticsearch.test; -import java.io.IOException; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - +import com.carrotsearch.randomizedtesting.RandomizedContext; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; @@ -44,6 +38,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexService; import org.elasticsearch.indices.IndicesService; @@ -56,11 +51,16 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.discovery.TestZenDiscovery; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.MockTcpTransportPlugin; -import org.junit.After; import org.junit.AfterClass; -import org.junit.Before; import org.junit.BeforeClass; +import java.io.IOException; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; + import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -73,64 +73,58 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { private static Node NODE = null; - private void reset() throws IOException { - assert NODE != null; - stopNode(); - startNode(); - } - - protected void startNode() { + protected void startNode(long seed) throws Exception { assert NODE == null; - NODE = newNode(); - // we must wait for the node to actually be up and running. otherwise the node might have started, elected itself master but might not yet have removed the + NODE = RandomizedContext.current().runWithPrivateRandomness(seed, this::newNode); + // we must wait for the node to actually be up and running. 
otherwise the node might have started, + // elected itself master but might not yet have removed the // SERVICE_UNAVAILABLE/1/state not recovered / initialized block ClusterHealthResponse clusterHealthResponse = client().admin().cluster().prepareHealth().setWaitForGreenStatus().get(); assertFalse(clusterHealthResponse.isTimedOut()); client().admin().indices() - .preparePutTemplate("random_index_template") + .preparePutTemplate("one_shard_index_template") .setPatterns(Collections.singletonList("*")) .setOrder(0) .setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get(); } - protected static void stopNode() throws IOException { + private static void stopNode() throws IOException { Node node = NODE; NODE = null; IOUtils.close(node); } - private void cleanup(boolean resetNode) throws IOException { - assertAcked(client().admin().indices().prepareDelete("*").get()); - if (resetNode) { - reset(); + @Override + public void setUp() throws Exception { + super.setUp(); + //the seed has to be created regardless of whether it will be used or not, for repeatability + long seed = random().nextLong(); + // Create the node lazily, on the first test. This is ok because we do not randomize any settings, + // only the cluster name. This allows us to have overridden properties for plugins and the version to use. + if (NODE == null) { + startNode(seed); } + } + + @Override + public void tearDown() throws Exception { + logger.info("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName()); + super.tearDown(); + assertAcked(client().admin().indices().prepareDelete("*").get()); MetaData metaData = client().admin().cluster().prepareState().get().getState().getMetaData(); assertThat("test leaves persistent cluster metadata behind: " + metaData.persistentSettings().getAsMap(), metaData.persistentSettings().getAsMap().size(), equalTo(0)); assertThat("test leaves transient cluster metadata behind: " + metaData.transientSettings().getAsMap(), metaData.transientSettings().getAsMap().size(), equalTo(0)); - } - - @Before - @Override - public void setUp() throws Exception { - super.setUp(); - // Create the node lazily, on the first test. This is ok because we do not randomize any settings, - // only the cluster name. This allows us to have overridden properties for plugins and the version to use. - if (NODE == null) { - startNode(); + if (resetNodeAfterTest()) { + assert NODE != null; + stopNode(); + //the seed can be created within this if as it will either be executed before every test method or will never be. + startNode(random().nextLong()); } } - @After - @Override - public void tearDown() throws Exception { - logger.info("[{}#{}]: cleaning up after test", getTestClass().getSimpleName(), getTestName()); - super.tearDown(); - cleanup(resetNodeAfterTest()); - } - @BeforeClass public static void setUpClass() throws Exception { stopNode(); @@ -150,7 +144,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { return false; } - /** The plugin classes that should be added to the node. 
*/ protected Collection> getPlugins() { return Collections.emptyList(); @@ -171,13 +164,13 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { private Node newNode() { final Path tempDir = createTempDir(); Settings settings = Settings.builder() - .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", randomLong())) + .put(ClusterName.CLUSTER_NAME_SETTING.getKey(), InternalTestCluster.clusterName("single-node-cluster", random().nextLong())) .put(Environment.PATH_HOME_SETTING.getKey(), tempDir) .put(Environment.PATH_REPO_SETTING.getKey(), tempDir.resolve("repo")) // TODO: use a consistent data path for custom paths // This needs to tie into the ESIntegTestCase#indexSettings() method .put(Environment.PATH_SHARED_DATA_SETTING.getKey(), createTempDir().getParent()) - .put("node.name", nodeName()) + .put("node.name", "node_s_0") .put("script.inline", "true") .put("script.stored", "true") .put(ScriptService.SCRIPT_MAX_COMPILATIONS_PER_MINUTE.getKey(), 1000) @@ -185,6 +178,7 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { .put(NetworkModule.HTTP_ENABLED.getKey(), false) .put("transport.type", MockTcpTransportPlugin.MOCK_TCP_TRANSPORT_NAME) .put(Node.NODE_DATA_SETTING.getKey(), true) + .put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), random().nextLong()) .put(nodeSettings()) // allow test cases to provide their own settings or override these .build(); Collection> plugins = getPlugins(); @@ -212,13 +206,6 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { return NODE.client(); } - /** - * Returns the single test nodes name. - */ - public String nodeName() { - return "node_s_0"; - } - /** * Return a reference to the singleton node. */ @@ -274,7 +261,8 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { // Wait for the index to be allocated so that cluster state updates don't override // changes that would have been done locally ClusterHealthResponse health = client().admin().cluster() - .health(Requests.clusterHealthRequest(index).waitForYellowStatus().waitForEvents(Priority.LANGUID).waitForNoRelocatingShards(true)).actionGet(); + .health(Requests.clusterHealthRequest(index).waitForYellowStatus().waitForEvents(Priority.LANGUID) + .waitForNoRelocatingShards(true)).actionGet(); assertThat(health.getStatus(), lessThanOrEqualTo(ClusterHealthStatus.YELLOW)); assertThat("Cluster must be a single node cluster", health.getNumberOfDataNodes(), equalTo(1)); IndicesService instanceFromNode = getInstanceFromNode(IndicesService.class); @@ -316,7 +304,8 @@ public abstract class ESSingleNodeTestCase extends ESTestCase { */ public ClusterHealthStatus ensureGreen(TimeValue timeout, String... 
indices) { ClusterHealthResponse actionGet = client().admin().cluster() - .health(Requests.clusterHealthRequest(indices).timeout(timeout).waitForGreenStatus().waitForEvents(Priority.LANGUID).waitForNoRelocatingShards(true)).actionGet(); + .health(Requests.clusterHealthRequest(indices).timeout(timeout).waitForGreenStatus().waitForEvents(Priority.LANGUID) + .waitForNoRelocatingShards(true)).actionGet(); if (actionGet.isTimedOut()) { logger.info("ensureGreen timed out, cluster state:\n{}\n{}", client().admin().cluster().prepareState().get().getState(), client().admin().cluster().preparePendingClusterTasks().get()); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index a169274c172..9a198ade50a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -795,7 +795,8 @@ public abstract class ESTestCase extends LuceneTestCase { */ public static List randomSubsetOf(int size, Collection collection) { if (size > collection.size()) { - throw new IllegalArgumentException("Can\'t pick " + size + " random objects from a collection of " + collection.size() + " objects"); + throw new IllegalArgumentException("Can\'t pick " + size + " random objects from a collection of " + + collection.size() + " objects"); } List tempList = new ArrayList<>(collection); Collections.shuffle(tempList, random()); From 55feae1e9a9fa047de9caf35cbab991569237e04 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 21 Dec 2016 13:28:43 +0100 Subject: [PATCH 09/21] [TEST] remove deleted test classes from checkstyle suppressions Relates to #21915 --- buildSrc/src/main/resources/checkstyle_suppressions.xml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/buildSrc/src/main/resources/checkstyle_suppressions.xml b/buildSrc/src/main/resources/checkstyle_suppressions.xml index 02097315eaa..91ac28441e7 100644 --- a/buildSrc/src/main/resources/checkstyle_suppressions.xml +++ b/buildSrc/src/main/resources/checkstyle_suppressions.xml @@ -590,9 +590,6 @@ - - - @@ -704,7 +701,6 @@ - @@ -923,7 +919,6 @@ - @@ -994,7 +989,6 @@ - From 7141f6b554559a1d7bb52732ce62fb63240a4004 Mon Sep 17 00:00:00 2001 From: javanna Date: Wed, 21 Dec 2016 13:31:02 +0100 Subject: [PATCH 10/21] [TEST] improve error message in ESTestCase#assertWarnings --- .../src/main/java/org/elasticsearch/test/ESTestCase.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 9a198ade50a..bcfad66ef59 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -29,7 +29,6 @@ import com.carrotsearch.randomizedtesting.generators.RandomNumbers; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; import com.carrotsearch.randomizedtesting.rules.TestRuleAdapter; - import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -63,9 +62,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.MockPageCacheRecycler; +import 
org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; -import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; @@ -133,7 +132,6 @@ import static org.elasticsearch.common.util.CollectionUtils.arrayAsArrayList; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; -import static org.hamcrest.Matchers.hasSize; /** * Base testcase for randomized unit testing with Elasticsearch @@ -300,7 +298,9 @@ public abstract class ESTestCase extends LuceneTestCase { } try { final List actualWarnings = threadContext.getResponseHeaders().get(DeprecationLogger.WARNING_HEADER); - assertThat(actualWarnings, hasSize(expectedWarnings.length)); + assertEquals("Expected " + expectedWarnings.length + " warnings but found " + actualWarnings.size() + "\nExpected: " + + Arrays.asList(expectedWarnings) + "\nActual: " + actualWarnings, + expectedWarnings.length, actualWarnings.size()); for (String msg : expectedWarnings) { assertThat(actualWarnings, hasItem(equalTo(msg))); } From 8847c3409303bc8d4c2ff0107c2a34a5068fc4ef Mon Sep 17 00:00:00 2001 From: Nic Palmer Date: Wed, 21 Dec 2016 13:10:33 +0000 Subject: [PATCH 11/21] Push for eu-west-2 issue --- .../java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index 7eac1184fbf..f8dec51f41a 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -178,6 +178,9 @@ public class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Servic case "eu-west-1": endpoint = "ec2.eu-west-1.amazonaws.com"; break; + case "eu-west-2": + endpoint = "ec2.eu-west-2.amazonaws.com"; + break; case "eu-central": case "eu-central-1": endpoint = "ec2.eu-central-1.amazonaws.com"; From 80f8dfe852ceaa012433b8e46c33f75fd1efe507 Mon Sep 17 00:00:00 2001 From: Rui Hu Date: Wed, 21 Dec 2016 02:22:10 -0500 Subject: [PATCH 12/21] Fixed document mistake and fit for 5.1.1 API --- docs/java-api/docs/index_.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/java-api/docs/index_.asciidoc b/docs/java-api/docs/index_.asciidoc index 1e48fbd431c..0b91622dd81 100644 --- a/docs/java-api/docs/index_.asciidoc +++ b/docs/java-api/docs/index_.asciidoc @@ -156,8 +156,8 @@ String _type = response.getType(); String _id = response.getId(); // Version (if it's the first time you index this document, you will get: 1) long _version = response.getVersion(); -// isCreated() is true if the document is a new one, false if it has been updated -boolean created = response.isCreated(); +// status has stored current instance statement. 
+RestStatus status = response.status(); -------------------------------------------------- For more information on the index operation, check out the REST From 567c65b0d57e92cd530dafa7f55de7ba681af13e Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Wed, 21 Dec 2016 09:05:14 -0500 Subject: [PATCH 13/21] Replace IndicesQueriesRegistry (#22289) * Switch query parsing to namedObject * Remove IndicesQueriesRegistry --- .../org/elasticsearch/index/IndexModule.java | 4 +- .../org/elasticsearch/index/IndexService.java | 6 +- .../index/query/MatchAllQueryBuilder.java | 2 +- .../index/query/QueryParseContext.java | 22 ++++--- .../index/query/QueryRewriteContext.java | 7 +-- .../index/query/QueryShardContext.java | 9 +-- .../elasticsearch/indices/IndicesService.java | 18 +----- .../indices/query/IndicesQueriesRegistry.java | 32 ----------- .../java/org/elasticsearch/node/Node.java | 4 +- .../rest/action/RestActions.java | 6 +- .../indices/RestValidateQueryAction.java | 12 +--- .../rest/action/cat/RestCountAction.java | 9 +-- .../rest/action/document/RestCountAction.java | 11 +--- .../rest/action/search/RestExplainAction.java | 9 +-- .../action/search/RestMultiSearchAction.java | 3 +- .../rest/action/search/RestSearchAction.java | 2 +- .../elasticsearch/search/SearchModule.java | 57 ++++++++++--------- .../search/SearchRequestParsers.java | 14 +---- .../search/MultiSearchRequestTests.java | 17 +++--- .../elasticsearch/index/IndexModuleTests.java | 5 +- .../index/mapper/DateFieldTypeTests.java | 10 ++-- .../index/mapper/RangeFieldTypeTests.java | 2 +- .../index/query/BoolQueryBuilderTests.java | 1 - .../index/query/InnerHitBuilderTests.java | 15 +++-- .../index/query/QueryParseContextTests.java | 28 +++++---- .../index/query/QueryShardContextTests.java | 2 +- .../index/query/RangeQueryRewriteTests.java | 6 +- .../index/query/SimpleQueryParserTests.java | 26 +-------- .../search/AbstractSearchTestCase.java | 11 +++- .../search/SearchModuleTests.java | 50 +++++++--------- .../AggregationCollectorTests.java | 4 +- .../aggregations/AggregatorParsingTests.java | 25 ++++---- .../aggregations/BaseAggregationTestCase.java | 13 +++-- .../BasePipelineAggregationTestCase.java | 5 +- .../geogrid/GeoHashGridParserTests.java | 14 ++--- .../bucket/histogram/ExtendedBoundsTests.java | 2 +- .../SignificanceHeuristicTests.java | 7 +-- .../aggregations/metrics/FiltersTests.java | 8 +-- .../aggregations/metrics/TopHitsTests.java | 5 +- .../ExtendedStatsBucketTests.java | 2 +- .../bucketmetrics/PercentilesBucketTests.java | 2 +- .../pipeline/moving/avg/MovAvgTests.java | 2 +- .../support/IncludeExcludeTests.java | 7 +-- .../builder/SearchSourceBuilderTests.java | 4 +- .../highlight/HighlightBuilderTests.java | 29 ++++++---- .../ShardSearchTransportRequestTests.java | 4 +- .../rescore/QueryRescoreBuilderTests.java | 19 ++++--- .../searchafter/SearchAfterBuilderTests.java | 23 +------- .../search/slice/SliceBuilderTests.java | 24 +------- .../search/sort/AbstractSortTestCase.java | 19 ++++--- .../search/sort/FieldSortBuilderTests.java | 2 +- .../sort/GeoDistanceSortBuilderTests.java | 17 +++--- .../search/sort/ScoreSortBuilderTests.java | 5 +- .../search/sort/ScriptSortBuilderTests.java | 10 ++-- .../search/sort/SortBuilderTests.java | 21 ++++--- .../AbstractSuggestionBuilderTestCase.java | 8 +-- .../search/suggest/SuggestBuilderTests.java | 4 +- .../CategoryContextMappingTests.java | 3 +- .../completion/GeoContextMappingTests.java | 3 +- .../completion/QueryContextTestCase.java | 4 +- 
.../phrase/DirectCandidateGeneratorTests.java | 11 +--- .../phrase/SmoothingModelTestCase.java | 3 +- .../TransportSearchTemplateAction.java | 2 +- .../TransportMultiPercolateAction.java | 2 +- .../percolator/TransportPercolateAction.java | 9 ++- .../index/reindex/RestReindexAction.java | 2 +- .../index/reindex/RestReindexActionTests.java | 5 +- .../test/AbstractQueryTestCase.java | 8 +-- .../search/MockSearchServiceTests.java | 2 +- 69 files changed, 282 insertions(+), 457 deletions(-) delete mode 100644 core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java diff --git a/core/src/main/java/org/elasticsearch/index/IndexModule.java b/core/src/main/java/org/elasticsearch/index/IndexModule.java index 3734a4eab58..97edd2fd468 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexModule.java +++ b/core/src/main/java/org/elasticsearch/index/IndexModule.java @@ -48,7 +48,6 @@ import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; @@ -327,7 +326,6 @@ public final class IndexModule { BigArrays bigArrays, ThreadPool threadPool, ScriptService scriptService, - IndicesQueriesRegistry indicesQueriesRegistry, ClusterService clusterService, Client client, IndicesQueryCache indicesQueryCache, @@ -366,7 +364,7 @@ public final class IndexModule { } return new IndexService(indexSettings, environment, xContentRegistry, new SimilarityService(indexSettings, similarities), shardStoreDeleter, analysisRegistry, engineFactory.get(), circuitBreakerService, bigArrays, threadPool, scriptService, - indicesQueriesRegistry, clusterService, client, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry, + clusterService, client, queryCache, store, eventListener, searcherWrapperFactory, mapperRegistry, indicesFieldDataCache, globalCheckpointSyncer, searchOperationListeners, indexOperationListeners); } diff --git a/core/src/main/java/org/elasticsearch/index/IndexService.java b/core/src/main/java/org/elasticsearch/index/IndexService.java index 11018d75b3b..32a0268e3ea 100644 --- a/core/src/main/java/org/elasticsearch/index/IndexService.java +++ b/core/src/main/java/org/elasticsearch/index/IndexService.java @@ -68,7 +68,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; @@ -120,7 +119,6 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust private final BigArrays bigArrays; private final AsyncGlobalCheckpointTask globalCheckpointTask; private final ScriptService scriptService; - private final IndicesQueriesRegistry queryRegistry; private final ClusterService clusterService; private final Client client; @@ -134,7 +132,6 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust BigArrays bigArrays, ThreadPool threadPool, ScriptService scriptService, - IndicesQueriesRegistry queryRegistry, ClusterService 
clusterService, Client client, QueryCache queryCache, @@ -162,7 +159,6 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust this.bigArrays = bigArrays; this.threadPool = threadPool; this.scriptService = scriptService; - this.queryRegistry = queryRegistry; this.clusterService = clusterService; this.client = client; this.eventListener = eventListener; @@ -478,7 +474,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust public QueryShardContext newQueryShardContext(int shardId, IndexReader indexReader, LongSupplier nowInMillis) { return new QueryShardContext( shardId, indexSettings, indexCache.bitsetFilterCache(), indexFieldData, mapperService(), - similarityService(), scriptService, xContentRegistry, queryRegistry, + similarityService(), scriptService, xContentRegistry, client, indexReader, nowInMillis); } diff --git a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java index 4a86854e7bf..63580eca3e7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/MatchAllQueryBuilder.java @@ -58,7 +58,7 @@ public class MatchAllQueryBuilder extends AbstractQueryBuilder PARSER = new ObjectParser<>(NAME, MatchAllQueryBuilder::new); + private static final ObjectParser PARSER = new ObjectParser<>(NAME, MatchAllQueryBuilder::new); static { declareStandardFields(PARSER); diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index b944bd3a42c..c8b0e1cb672 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -23,8 +23,9 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParseFieldMatcherSupplier; import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.xcontent.NamedXContentRegistry.UnknownNamedObjectException; +import org.elasticsearch.common.xcontent.XContentLocation; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.Script; import java.io.IOException; @@ -36,17 +37,14 @@ public class QueryParseContext implements ParseFieldMatcherSupplier { private static final ParseField CACHE_KEY = new ParseField("_cache_key").withAllDeprecated("Filters are always used as cache keys"); private final XContentParser parser; - private final IndicesQueriesRegistry indicesQueriesRegistry; private final ParseFieldMatcher parseFieldMatcher; private final String defaultScriptLanguage; - public QueryParseContext(IndicesQueriesRegistry registry, XContentParser parser, ParseFieldMatcher parseFieldMatcher) { - this(Script.DEFAULT_SCRIPT_LANG, registry, parser, parseFieldMatcher); + public QueryParseContext(XContentParser parser, ParseFieldMatcher parseFieldMatcher) { + this(Script.DEFAULT_SCRIPT_LANG, parser, parseFieldMatcher); } - public QueryParseContext(String defaultScriptLanguage, IndicesQueriesRegistry registry, XContentParser parser, - ParseFieldMatcher parseFieldMatcher) { - this.indicesQueriesRegistry = Objects.requireNonNull(registry, "indices queries registry cannot be null"); + public QueryParseContext(String defaultScriptLanguage, XContentParser parser, 
ParseFieldMatcher parseFieldMatcher) { this.parser = Objects.requireNonNull(parser, "parser cannot be null"); this.parseFieldMatcher = Objects.requireNonNull(parseFieldMatcher, "parse field matcher cannot be null"); this.defaultScriptLanguage = defaultScriptLanguage; @@ -105,7 +103,15 @@ public class QueryParseContext implements ParseFieldMatcherSupplier { if (parser.nextToken() != XContentParser.Token.START_OBJECT) { throw new ParsingException(parser.getTokenLocation(), "[" + queryName + "] query malformed, no start_object after query name"); } - QueryBuilder result = indicesQueriesRegistry.lookup(queryName, parseFieldMatcher, parser.getTokenLocation()).fromXContent(this); + QueryBuilder result; + try { + result = parser.namedObject(QueryBuilder.class, queryName, this); + } catch (UnknownNamedObjectException e) { + // Preserve the error message from 5.0 until we have a compellingly better message so we don't break BWC. + // This intentionally doesn't include the causing exception because that'd change the "root_cause" of any unknown query errors + throw new ParsingException(new XContentLocation(e.getLineNumber(), e.getColumnNumber()), + "no [query] registered for [" + e.getName() + "]"); + } //end_object of the specific query (e.g. match, multi_match etc.) element if (parser.currentToken() != XContentParser.Token.END_OBJECT) { throw new ParsingException(parser.getTokenLocation(), diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java index e98af78cf24..13362f676aa 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryRewriteContext.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; @@ -43,19 +42,17 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier { protected final ScriptService scriptService; protected final IndexSettings indexSettings; private final NamedXContentRegistry xContentRegistry; - protected final IndicesQueriesRegistry indicesQueriesRegistry; protected final Client client; protected final IndexReader reader; protected final LongSupplier nowInMillis; public QueryRewriteContext(IndexSettings indexSettings, MapperService mapperService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, IndicesQueriesRegistry indicesQueriesRegistry, Client client, IndexReader reader, + NamedXContentRegistry xContentRegistry, Client client, IndexReader reader, LongSupplier nowInMillis) { this.mapperService = mapperService; this.scriptService = scriptService; this.indexSettings = indexSettings; this.xContentRegistry = xContentRegistry; - this.indicesQueriesRegistry = indicesQueriesRegistry; this.client = client; this.reader = reader; this.nowInMillis = nowInMillis; @@ -107,7 +104,7 @@ public class QueryRewriteContext implements ParseFieldMatcherSupplier { * are configured in the index settings. The default script language will always default to Painless. 
*/ public QueryParseContext newParseContext(XContentParser parser) { - return new QueryParseContext(indicesQueriesRegistry, parser, indexSettings.getParseFieldMatcher()); + return new QueryParseContext(parser, indexSettings.getParseFieldMatcher()); } public long nowInMillis() { diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java index a4a1193063b..31668922e6e 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryShardContext.java @@ -48,7 +48,6 @@ import org.elasticsearch.index.mapper.ObjectMapper; import org.elasticsearch.index.mapper.TextFieldMapper; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.index.similarity.SimilarityService; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; @@ -92,7 +91,6 @@ public class QueryShardContext extends QueryRewriteContext { private final Map namedQueries = new HashMap<>(); private final MapperQueryParser queryParser = new MapperQueryParser(this); - private final IndicesQueriesRegistry indicesQueriesRegistry; private boolean allowUnmappedFields; private boolean mapUnmappedFieldAsString; private NestedScope nestedScope; @@ -100,9 +98,9 @@ public class QueryShardContext extends QueryRewriteContext { public QueryShardContext(int shardId, IndexSettings indexSettings, BitsetFilterCache bitsetFilterCache, IndexFieldDataService indexFieldDataService, MapperService mapperService, SimilarityService similarityService, - ScriptService scriptService, NamedXContentRegistry xContentRegistry, IndicesQueriesRegistry indicesQueriesRegistry, + ScriptService scriptService, NamedXContentRegistry xContentRegistry, Client client, IndexReader reader, LongSupplier nowInMillis) { - super(indexSettings, mapperService, scriptService, xContentRegistry, indicesQueriesRegistry, client, reader, nowInMillis); + super(indexSettings, mapperService, scriptService, xContentRegistry, client, reader, nowInMillis); this.shardId = shardId; this.indexSettings = indexSettings; this.similarityService = similarityService; @@ -110,14 +108,13 @@ public class QueryShardContext extends QueryRewriteContext { this.bitsetFilterCache = bitsetFilterCache; this.indexFieldDataService = indexFieldDataService; this.allowUnmappedFields = indexSettings.isDefaultAllowUnmappedFields(); - this.indicesQueriesRegistry = indicesQueriesRegistry; this.nestedScope = new NestedScope(); } public QueryShardContext(QueryShardContext source) { this(source.shardId, source.indexSettings, source.bitsetFilterCache, source.indexFieldDataService, source.mapperService, - source.similarityService, source.scriptService, source.getXContentRegistry(), source.indicesQueriesRegistry, source.client, + source.similarityService, source.scriptService, source.getXContentRegistry(), source.client, source.reader, source.nowInMillis); this.types = source.getTypes(); } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesService.java b/core/src/main/java/org/elasticsearch/indices/IndicesService.java index 413af6466db..8f5d01d7683 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -19,8 +19,6 @@ package org.elasticsearch.indices; -import 
com.carrotsearch.hppc.cursors.ObjectCursor; - import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.index.DirectoryReader; @@ -41,7 +39,6 @@ import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; -import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; @@ -106,7 +103,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.plugins.PluginsService; @@ -156,7 +152,6 @@ public class IndicesService extends AbstractLifecycleComponent private final NamedXContentRegistry xContentRegistry; private final TimeValue shardsClosedTimeout; private final AnalysisRegistry analysisRegistry; - private final IndicesQueriesRegistry indicesQueriesRegistry; private final IndexNameExpressionResolver indexNameExpressionResolver; private final IndexScopedSettings indexScopeSetting; private final IndicesFieldDataCache indicesFieldDataCache; @@ -187,7 +182,7 @@ public class IndicesService extends AbstractLifecycleComponent public IndicesService(Settings settings, PluginsService pluginsService, NodeEnvironment nodeEnv, NamedXContentRegistry xContentRegistry, ClusterSettings clusterSettings, AnalysisRegistry analysisRegistry, - IndicesQueriesRegistry indicesQueriesRegistry, IndexNameExpressionResolver indexNameExpressionResolver, + IndexNameExpressionResolver indexNameExpressionResolver, MapperRegistry mapperRegistry, NamedWriteableRegistry namedWriteableRegistry, ThreadPool threadPool, IndexScopedSettings indexScopedSettings, CircuitBreakerService circuitBreakerService, BigArrays bigArrays, ScriptService scriptService, ClusterService clusterService, Client client, @@ -199,7 +194,6 @@ public class IndicesService extends AbstractLifecycleComponent this.xContentRegistry = xContentRegistry; this.shardsClosedTimeout = settings.getAsTime(INDICES_SHARDS_CLOSED_TIMEOUT, new TimeValue(1, TimeUnit.DAYS)); this.analysisRegistry = analysisRegistry; - this.indicesQueriesRegistry = indicesQueriesRegistry; this.indexNameExpressionResolver = indexNameExpressionResolver; this.indicesRequestCache = new IndicesRequestCache(settings); this.indicesQueryCache = new IndicesQueryCache(settings); @@ -451,7 +445,6 @@ public class IndicesService extends AbstractLifecycleComponent bigArrays, threadPool, scriptService, - indicesQueriesRegistry, clusterService, client, indicesQueryCache, @@ -1017,13 +1010,6 @@ public class IndicesService extends AbstractLifecycleComponent return numUncompletedDeletes.get() > 0; } - /** - * Returns this nodes {@link IndicesQueriesRegistry} - */ - public IndicesQueriesRegistry getIndicesQueryRegistry() { - return indicesQueriesRegistry; - } - public AnalysisRegistry getAnalysis() { return analysisRegistry; } @@ -1268,7 +1254,7 @@ public class IndicesService extends AbstractLifecycleComponent * of 
dependencies we pass in a function that can perform the parsing. */ ShardSearchRequest.FilterParser filterParser = bytes -> { try (XContentParser parser = XContentFactory.xContent(bytes).createParser(xContentRegistry, bytes)) { - return new QueryParseContext(indicesQueriesRegistry, parser, new ParseFieldMatcher(settings)).parseInnerQueryBuilder(); + return new QueryParseContext(parser, new ParseFieldMatcher(settings)).parseInnerQueryBuilder(); } }; String[] aliases = indexNameExpressionResolver.filteringAliases(state, index, expressions); diff --git a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java b/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java deleted file mode 100644 index f1d45b55495..00000000000 --- a/core/src/main/java/org/elasticsearch/indices/query/IndicesQueriesRegistry.java +++ /dev/null @@ -1,32 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.indices.query; - -import org.elasticsearch.common.xcontent.ParseFieldRegistry; -import org.elasticsearch.index.query.QueryParser; - -/** - * Extensions to ParseFieldRegistry to make Guice happy. 
- */ -public class IndicesQueriesRegistry extends ParseFieldRegistry> { - public IndicesQueriesRegistry() { - super("query"); - } -} diff --git a/core/src/main/java/org/elasticsearch/node/Node.java b/core/src/main/java/org/elasticsearch/node/Node.java index bfe5ad453b5..2c29d00ecc6 100644 --- a/core/src/main/java/org/elasticsearch/node/Node.java +++ b/core/src/main/java/org/elasticsearch/node/Node.java @@ -94,7 +94,6 @@ import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.indices.recovery.PeerRecoverySourceService; import org.elasticsearch.indices.recovery.PeerRecoveryTargetService; import org.elasticsearch.indices.recovery.RecoverySettings; @@ -372,7 +371,7 @@ public class Node implements Closeable { ).flatMap(Function.identity()).collect(toList())); final MetaStateService metaStateService = new MetaStateService(settings, nodeEnvironment); final IndicesService indicesService = new IndicesService(settings, pluginsService, nodeEnvironment, xContentRegistry, - settingsModule.getClusterSettings(), analysisModule.getAnalysisRegistry(), searchModule.getQueryParserRegistry(), + settingsModule.getClusterSettings(), analysisModule.getAnalysisRegistry(), clusterModule.getIndexNameExpressionResolver(), indicesModule.getMapperRegistry(), namedWriteableRegistry, threadPool, settingsModule.getIndexScopedSettings(), circuitBreakerService, bigArrays, scriptModule.getScriptService(), clusterService, client, metaStateService); @@ -410,7 +409,6 @@ public class Node implements Closeable { final DiscoveryModule discoveryModule = new DiscoveryModule(this.settings, threadPool, transportService, networkService, clusterService, pluginsService.filterPlugins(DiscoveryPlugin.class)); modules.add(b -> { - b.bind(IndicesQueriesRegistry.class).toInstance(searchModule.getQueryParserRegistry()); b.bind(SearchRequestParsers.class).toInstance(searchModule.getSearchRequestParsers()); b.bind(SearchExtRegistry.class).toInstance(searchModule.getSearchExtRegistry()); b.bind(NamedXContentRegistry.class).toInstance(xContentRegistry); diff --git a/core/src/main/java/org/elasticsearch/rest/action/RestActions.java b/core/src/main/java/org/elasticsearch/rest/action/RestActions.java index 0cb1a248b17..4a96becbbbd 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/RestActions.java +++ b/core/src/main/java/org/elasticsearch/rest/action/RestActions.java @@ -36,7 +36,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryStringQueryBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; @@ -195,9 +194,8 @@ public class RestActions { return queryBuilder; } - public static QueryBuilder getQueryContent(XContentParser requestParser, IndicesQueriesRegistry indicesQueriesRegistry, - ParseFieldMatcher parseFieldMatcher) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, requestParser, parseFieldMatcher); + public static QueryBuilder getQueryContent(XContentParser requestParser, ParseFieldMatcher 
parseFieldMatcher) { + QueryParseContext context = new QueryParseContext(requestParser, parseFieldMatcher); return context.parseTopLevelQueryBuilder(); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java index 8b68a3b6168..e12baa35bec 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/admin/indices/RestValidateQueryAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.admin.indices; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.indices.validate.query.QueryExplanation; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; @@ -30,9 +29,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestChannel; @@ -50,11 +46,8 @@ import static org.elasticsearch.rest.RestStatus.OK; import static org.elasticsearch.rest.action.RestActions.buildBroadcastShardsHeader; public class RestValidateQueryAction extends BaseRestHandler { - - private final IndicesQueriesRegistry indicesQueriesRegistry; - @Inject - public RestValidateQueryAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) { + public RestValidateQueryAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(GET, "/_validate/query", this); controller.registerHandler(POST, "/_validate/query", this); @@ -62,7 +55,6 @@ public class RestValidateQueryAction extends BaseRestHandler { controller.registerHandler(POST, "/{index}/_validate/query", this); controller.registerHandler(GET, "/{index}/{type}/_validate/query", this); controller.registerHandler(POST, "/{index}/{type}/_validate/query", this); - this.indicesQueriesRegistry = indicesQueriesRegistry; } @Override @@ -77,7 +69,7 @@ public class RestValidateQueryAction extends BaseRestHandler { try { request.withContentOrSourceParamParserOrNull(parser -> { if (parser != null) { - validateQueryRequest.query(RestActions.getQueryContent(parser, indicesQueriesRegistry, parseFieldMatcher)); + validateQueryRequest.query(RestActions.getQueryContent(parser, parseFieldMatcher)); } else if (request.hasParam("q")) { validateQueryRequest.query(RestActions.urlParamsToQueryBuilder(request)); } diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java index 22346ad2772..49cbcfa6610 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestCountAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.Table; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.query.QueryBuilder; -import 
org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -41,15 +40,11 @@ import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; public class RestCountAction extends AbstractCatAction { - - private final IndicesQueriesRegistry indicesQueriesRegistry; - @Inject - public RestCountAction(Settings settings, RestController restController, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) { + public RestCountAction(Settings settings, RestController restController, RestController controller) { super(settings); restController.registerHandler(GET, "/_cat/count", this); restController.registerHandler(GET, "/_cat/count/{index}", this); - this.indicesQueriesRegistry = indicesQueriesRegistry; } @Override @@ -72,7 +67,7 @@ public class RestCountAction extends AbstractCatAction { searchSourceBuilder.query(queryBuilder); } } else { - searchSourceBuilder.query(RestActions.getQueryContent(parser, indicesQueriesRegistry, parseFieldMatcher)); + searchSourceBuilder.query(RestActions.getQueryContent(parser, parseFieldMatcher)); } }); } catch (IOException e) { diff --git a/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java b/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java index 2983879961c..86acb1fb1b9 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/document/RestCountAction.java @@ -19,7 +19,6 @@ package org.elasticsearch.rest.action.document; -import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.IndicesOptions; @@ -28,9 +27,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -48,11 +45,8 @@ import static org.elasticsearch.rest.action.RestActions.buildBroadcastShardsHead import static org.elasticsearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER; public class RestCountAction extends BaseRestHandler { - - private final IndicesQueriesRegistry indicesQueriesRegistry; - @Inject - public RestCountAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) { + public RestCountAction(Settings settings, RestController controller) { super(settings); controller.registerHandler(POST, "/_count", this); controller.registerHandler(GET, "/_count", this); @@ -60,7 +54,6 @@ public class RestCountAction extends BaseRestHandler { controller.registerHandler(GET, "/{index}/_count", this); controller.registerHandler(POST, "/{index}/{type}/_count", this); controller.registerHandler(GET, "/{index}/{type}/_count", this); - this.indicesQueriesRegistry = indicesQueriesRegistry; } @Override @@ -76,7 +69,7 @@ public class RestCountAction extends BaseRestHandler { searchSourceBuilder.query(queryBuilder); } } else { - 
searchSourceBuilder.query(RestActions.getQueryContent(parser, indicesQueriesRegistry, parseFieldMatcher)); + searchSourceBuilder.query(RestActions.getQueryContent(parser, parseFieldMatcher)); } }); countRequest.routing(request.param("routing")); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index 7d31ed7cad5..a3c845555bf 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.QueryBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; @@ -50,13 +49,9 @@ import static org.elasticsearch.rest.RestStatus.OK; * Rest action for computing a score explanation for specific documents. */ public class RestExplainAction extends BaseRestHandler { - - private final IndicesQueriesRegistry indicesQueriesRegistry; - @Inject - public RestExplainAction(Settings settings, RestController controller, IndicesQueriesRegistry indicesQueriesRegistry) { + public RestExplainAction(Settings settings, RestController controller) { super(settings); - this.indicesQueriesRegistry = indicesQueriesRegistry; controller.registerHandler(GET, "/{index}/{type}/{id}/_explain", this); controller.registerHandler(POST, "/{index}/{type}/{id}/_explain", this); } @@ -70,7 +65,7 @@ public class RestExplainAction extends BaseRestHandler { String queryString = request.param("q"); request.withContentOrSourceParamParserOrNull(parser -> { if (parser != null) { - explainRequest.query(RestActions.getQueryContent(parser, indicesQueriesRegistry, parseFieldMatcher)); + explainRequest.query(RestActions.getQueryContent(parser, parseFieldMatcher)); } else if (queryString != null) { QueryBuilder query = RestActions.urlParamsToQueryBuilder(request); explainRequest.query(query); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index 502d427050f..95c8ed51a3f 100644 --- a/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -89,8 +89,7 @@ public class RestMultiSearchAction extends BaseRestHandler { parseMultiLineRequest(restRequest, multiRequest.indicesOptions(), allowExplicitIndex, (searchRequest, parser) -> { try { - final QueryParseContext queryParseContext = new QueryParseContext(searchRequestParsers.queryParsers, parser, - parseFieldMatcher); + final QueryParseContext queryParseContext = new QueryParseContext(parser, parseFieldMatcher); searchRequest.source(SearchSourceBuilder.fromXContent(queryParseContext, searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers)); multiRequest.add(searchRequest); diff --git a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 425acbf41e3..b0e0ce074cf 100644 --- 
a/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/core/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -92,7 +92,7 @@ public class RestSearchAction extends BaseRestHandler { } searchRequest.indices(Strings.splitStringByCommaToArray(request.param("index"))); if (requestContentParser != null) { - QueryParseContext context = new QueryParseContext(searchRequestParsers.queryParsers, requestContentParser, parseFieldMatcher); + QueryParseContext context = new QueryParseContext(requestContentParser, parseFieldMatcher); searchRequest.source().parseXContent(context, searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers); } diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java index a5ea15c6702..41b59e6d686 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchModule.java +++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java @@ -85,7 +85,6 @@ import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.WeightBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SearchPlugin.AggregationSpec; import org.elasticsearch.plugins.SearchPlugin.FetchPhaseConstructionContext; @@ -269,7 +268,6 @@ public class SearchModule { private final boolean transportClient; private final Map highlighters; private final Map> suggesters; - private final IndicesQueriesRegistry queryParserRegistry = new IndicesQueriesRegistry(); private final ParseFieldRegistry aggregationParserRegistry = new ParseFieldRegistry<>("aggregation"); private final ParseFieldRegistry pipelineAggregationParserRegistry = new ParseFieldRegistry<>( "pipline_aggregation"); @@ -304,7 +302,7 @@ public class SearchModule { registerFetchSubPhases(plugins); registerSearchExts(plugins); registerShapes(); - searchRequestParsers = new SearchRequestParsers(queryParserRegistry, aggregatorParsers, getSuggesters(), searchExtParserRegistry); + searchRequestParsers = new SearchRequestParsers(aggregatorParsers, getSuggesters(), searchExtParserRegistry); } public List getNamedWriteables() { @@ -319,10 +317,6 @@ public class SearchModule { return new Suggesters(suggesters); } - public IndicesQueriesRegistry getQueryParserRegistry() { - return queryParserRegistry; - } - public SearchRequestParsers getSearchRequestParsers() { return searchRequestParsers; } @@ -442,11 +436,12 @@ public class SearchModule { if (false == transportClient) { aggregationParserRegistry.register(spec.getParser(), spec.getName()); } - namedWriteables.add(new Entry(AggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader())); + namedWriteables.add( + new NamedWriteableRegistry.Entry(AggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader())); for (Map.Entry> t : spec.getResultReaders().entrySet()) { String writeableName = t.getKey(); Writeable.Reader internalReader = t.getValue(); - namedWriteables.add(new Entry(InternalAggregation.class, writeableName, internalReader)); + namedWriteables.add(new NamedWriteableRegistry.Entry(InternalAggregation.class, writeableName, internalReader)); } } @@ -535,10 +530,13 @@ public class 
SearchModule { if (false == transportClient) { pipelineAggregationParserRegistry.register(spec.getParser(), spec.getName()); } - namedWriteables.add(new Entry(PipelineAggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader())); - namedWriteables.add(new Entry(PipelineAggregator.class, spec.getName().getPreferredName(), spec.getAggregatorReader())); + namedWriteables.add( + new NamedWriteableRegistry.Entry(PipelineAggregationBuilder.class, spec.getName().getPreferredName(), spec.getReader())); + namedWriteables.add( + new NamedWriteableRegistry.Entry(PipelineAggregator.class, spec.getName().getPreferredName(), spec.getAggregatorReader())); for (Map.Entry> resultReader : spec.getResultReaders().entrySet()) { - namedWriteables.add(new Entry(InternalAggregation.class, resultReader.getKey(), resultReader.getValue())); + namedWriteables + .add(new NamedWriteableRegistry.Entry(InternalAggregation.class, resultReader.getKey(), resultReader.getValue())); } } @@ -549,14 +547,14 @@ public class SearchModule { } private void registerRescorers() { - namedWriteables.add(new Entry(RescoreBuilder.class, QueryRescorerBuilder.NAME, QueryRescorerBuilder::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(RescoreBuilder.class, QueryRescorerBuilder.NAME, QueryRescorerBuilder::new)); } private void registerSorts() { - namedWriteables.add(new Entry(SortBuilder.class, GeoDistanceSortBuilder.NAME, GeoDistanceSortBuilder::new)); - namedWriteables.add(new Entry(SortBuilder.class, ScoreSortBuilder.NAME, ScoreSortBuilder::new)); - namedWriteables.add(new Entry(SortBuilder.class, ScriptSortBuilder.NAME, ScriptSortBuilder::new)); - namedWriteables.add(new Entry(SortBuilder.class, FieldSortBuilder.NAME, FieldSortBuilder::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SortBuilder.class, GeoDistanceSortBuilder.NAME, GeoDistanceSortBuilder::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SortBuilder.class, ScoreSortBuilder.NAME, ScoreSortBuilder::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SortBuilder.class, ScriptSortBuilder.NAME, ScriptSortBuilder::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SortBuilder.class, FieldSortBuilder.NAME, FieldSortBuilder::new)); } private void registerFromPlugin(List plugins, Function> producer, Consumer consumer) { @@ -568,9 +566,9 @@ public class SearchModule { } public static void registerSmoothingModels(List namedWriteables) { - namedWriteables.add(new Entry(SmoothingModel.class, Laplace.NAME, Laplace::new)); - namedWriteables.add(new Entry(SmoothingModel.class, LinearInterpolation.NAME, LinearInterpolation::new)); - namedWriteables.add(new Entry(SmoothingModel.class, StupidBackoff.NAME, StupidBackoff::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SmoothingModel.class, Laplace.NAME, Laplace::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SmoothingModel.class, LinearInterpolation.NAME, LinearInterpolation::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SmoothingModel.class, StupidBackoff.NAME, StupidBackoff::new)); } private Map> setupSuggesters(List plugins) { @@ -581,7 +579,7 @@ public class SearchModule { @Override public void register(String name, Suggester t) { super.register(name, t); - namedWriteables.add(new Entry(SuggestionBuilder.class, name, t)); + namedWriteables.add(new NamedWriteableRegistry.Entry(SuggestionBuilder.class, name, t)); } }; suggesters.register("phrase", PhraseSuggester.INSTANCE); @@ -619,7 +617,7 @@ public class 
SearchModule { //weight doesn't have its own parser, so every function supports it out of the box. //Can be a single function too when not associated to any other function, which is why it needs to be registered manually here. - namedWriteables.add(new Entry(ScoreFunctionBuilder.class, WeightBuilder.NAME, WeightBuilder::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(ScoreFunctionBuilder.class, WeightBuilder.NAME, WeightBuilder::new)); registerFromPlugin(plugins, SearchPlugin::getScoreFunctions, this::registerScoreFunction); } @@ -646,7 +644,7 @@ public class SearchModule { * Register a new ValueFormat. */ private void registerValueFormat(String name, Writeable.Reader reader) { - namedWriteables.add(new Entry(DocValueFormat.class, name, reader)); + namedWriteables.add(new NamedWriteableRegistry.Entry(DocValueFormat.class, name, reader)); } private void registerSignificanceHeuristics(List plugins) { @@ -662,7 +660,8 @@ public class SearchModule { private void registerSignificanceHeuristic(SearchExtensionSpec heuristic) { significanceHeuristicParserRegistry.register(heuristic.getParser(), heuristic.getName()); - namedWriteables.add(new Entry(SignificanceHeuristic.class, heuristic.getName().getPreferredName(), heuristic.getReader())); + namedWriteables.add(new NamedWriteableRegistry.Entry(SignificanceHeuristic.class, heuristic.getName().getPreferredName(), + heuristic.getReader())); } private void registerMovingAverageModels(List plugins) { @@ -677,7 +676,8 @@ public class SearchModule { private void registerMovingAverageModel(SearchExtensionSpec movAvgModel) { movingAverageModelParserRegistry.register(movAvgModel.getParser(), movAvgModel.getName()); - namedWriteables.add(new Entry(MovAvgModel.class, movAvgModel.getName().getPreferredName(), movAvgModel.getReader())); + namedWriteables.add( + new NamedWriteableRegistry.Entry(MovAvgModel.class, movAvgModel.getName().getPreferredName(), movAvgModel.getReader())); } private void registerFetchSubPhases(List plugins) { @@ -700,7 +700,7 @@ public class SearchModule { private void registerSearchExt(SearchExtSpec spec) { searchExtParserRegistry.register(spec.getParser(), spec.getName()); - namedWriteables.add(new Entry(SearchExtBuilder.class, spec.getName().getPreferredName(), spec.getReader())); + namedWriteables.add(new NamedWriteableRegistry.Entry(SearchExtBuilder.class, spec.getName().getPreferredName(), spec.getReader())); } private void registerFetchSubPhase(FetchSubPhase subPhase) { @@ -774,8 +774,9 @@ public class SearchModule { } private void registerQuery(QuerySpec spec) { - queryParserRegistry.register(spec.getParser(), spec.getName()); - namedWriteables.add(new Entry(QueryBuilder.class, spec.getName().getPreferredName(), spec.getReader())); + namedWriteables.add(new NamedWriteableRegistry.Entry(QueryBuilder.class, spec.getName().getPreferredName(), spec.getReader())); + namedXContents.add(new NamedXContentRegistry.Entry(QueryBuilder.class, spec.getName(), + (p, c) -> spec.getParser().fromXContent((QueryParseContext) c))); } public FetchPhase getFetchPhase() { diff --git a/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java b/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java index ba3afc15771..085f55ad71f 100644 --- a/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java +++ b/core/src/main/java/org/elasticsearch/search/SearchRequestParsers.java @@ -20,7 +20,6 @@ package org.elasticsearch.search; import org.elasticsearch.index.query.QueryParseContext; -import 
org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.aggregations.AggregatorParsers; import org.elasticsearch.search.suggest.Suggesters; @@ -33,15 +32,6 @@ public class SearchRequestParsers { // methods split across RestSearchAction and SearchSourceBuilder should be moved here // TODO: make all members private once parsing functions are moved here - // TODO: IndicesQueriesRegistry should be removed and just have the map of query parsers here - /** - * Query parsers that may be used in search requests. - * @see org.elasticsearch.index.query.QueryParseContext - * @see org.elasticsearch.search.builder.SearchSourceBuilder#fromXContent(QueryParseContext, AggregatorParsers, - * Suggesters, SearchExtRegistry) - */ - public final IndicesQueriesRegistry queryParsers; - // TODO: AggregatorParsers should be removed and the underlying maps of agg // and pipeline agg parsers should be here /** @@ -64,9 +54,7 @@ public class SearchRequestParsers { */ public final SearchExtRegistry searchExtParsers; - public SearchRequestParsers(IndicesQueriesRegistry queryParsers, AggregatorParsers aggParsers, Suggesters suggesters, - SearchExtRegistry searchExtParsers) { - this.queryParsers = queryParsers; + public SearchRequestParsers(AggregatorParsers aggParsers, Suggesters suggesters, SearchExtRegistry searchExtParsers) { this.aggParsers = aggParsers; this.suggesters = suggesters; this.searchExtParsers = searchExtParsers; diff --git a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java index e42c56046e6..cdaae6aa0ff 100644 --- a/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java +++ b/core/src/test/java/org/elasticsearch/action/search/MultiSearchRequestTests.java @@ -20,15 +20,18 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.search.SearchRequestParsers; @@ -38,6 +41,7 @@ import org.elasticsearch.test.rest.FakeRestRequest; import java.io.IOException; +import static java.util.Collections.singletonList; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -168,13 +172,12 @@ public class MultiSearchRequestTests extends ESTestCase { private MultiSearchRequest parseMultiSearchRequest(String sample) throws IOException { byte[] data = StreamsUtils.copyToBytesFromClasspath(sample); RestRequest restRequest = new FakeRestRequest.Builder(xContentRegistry()).withContent(new BytesArray(data)).build(); - return RestMultiSearchAction.parseRequest(restRequest, true, parsers(), ParseFieldMatcher.EMPTY); + 
return RestMultiSearchAction.parseRequest(restRequest, true, new SearchRequestParsers(null, null, null), ParseFieldMatcher.EMPTY); } - private SearchRequestParsers parsers() { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); - QueryParser parser = MatchAllQueryBuilder::fromXContent; - registry.register(parser, MatchAllQueryBuilder.NAME); - return new SearchRequestParsers(registry, null, null, null); + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(singletonList(new NamedXContentRegistry.Entry(QueryBuilder.class, + new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent((QueryParseContext) c)))); } } diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index e9b52589b9c..46281c812f1 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -64,7 +64,6 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.indices.mapper.MapperRegistry; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptContextRegistry; import org.elasticsearch.script.ScriptEngineRegistry; import org.elasticsearch.script.ScriptService; @@ -110,7 +109,6 @@ public class IndexModuleTests extends ESTestCase { private CircuitBreakerService circuitBreakerService; private BigArrays bigArrays; private ScriptService scriptService; - private IndicesQueriesRegistry indicesQueriesRegistry; private ClusterService clusterService; @Override @@ -130,7 +128,6 @@ public class IndexModuleTests extends ESTestCase { ScriptSettings scriptSettings = new ScriptSettings(scriptEngineRegistry, scriptContextRegistry); scriptService = new ScriptService(settings, environment, new ResourceWatcherService(settings, threadPool), scriptEngineRegistry, scriptContextRegistry, scriptSettings); - indicesQueriesRegistry = new IndicesQueriesRegistry(); clusterService = ClusterServiceUtils.createClusterService(threadPool); nodeEnvironment = new NodeEnvironment(settings, environment); mapperRegistry = new IndicesModule(Collections.emptyList()).getMapperRegistry(); @@ -145,7 +142,7 @@ public class IndexModuleTests extends ESTestCase { private IndexService newIndexService(IndexModule module) throws IOException { return module.newIndexService(nodeEnvironment, xContentRegistry(), deleter, circuitBreakerService, bigArrays, threadPool, - scriptService, indicesQueriesRegistry, clusterService, null, indicesQueryCache, mapperRegistry, shardId -> {}, + scriptService, clusterService, null, indicesQueryCache, mapperRegistry, shardId -> {}, new IndicesFieldDataCache(settings, listener)); } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java index a93cf0153cf..15f1819e939 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/DateFieldTypeTests.java @@ -71,7 +71,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { } public void testIsFieldWithinQueryEmptyReader() throws IOException { - QueryRewriteContext context = new 
QueryRewriteContext(null, null, null, xContentRegistry(), null, null, null, + QueryRewriteContext context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, () -> nowInMillis); IndexReader reader = new MultiReader(); DateFieldType ft = new DateFieldType(); @@ -82,7 +82,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { private void doTestIsFieldWithinQuery(DateFieldType ft, DirectoryReader reader, DateTimeZone zone, DateMathParser alternateFormat) throws IOException { - QueryRewriteContext context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, null, + QueryRewriteContext context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, () -> nowInMillis); assertEquals(Relation.INTERSECTS, ft.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", randomBoolean(), randomBoolean(), null, null, context)); @@ -130,7 +130,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { DateFieldType ft2 = new DateFieldType(); ft2.setName("my_date2"); - QueryRewriteContext context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, null, + QueryRewriteContext context = new QueryRewriteContext(null, null, null, xContentRegistry(), null, null, () -> nowInMillis); assertEquals(Relation.DISJOINT, ft2.isFieldWithinQuery(reader, "2015-10-09", "2016-01-02", false, false, null, null, context)); IOUtils.close(reader, w, dir); @@ -166,7 +166,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), - null, null, null, null, null, xContentRegistry(), null, null, null, () -> nowInMillis); + null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date = "2015-10-12T14:10:55"; @@ -185,7 +185,7 @@ public class DateFieldTypeTests extends FieldTypeTestCase { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1).build(); QueryShardContext context = new QueryShardContext(0, new IndexSettings(IndexMetaData.builder("foo").settings(indexSettings).build(), indexSettings), - null, null, null, null, null, xContentRegistry(), null, null, null, () -> nowInMillis); + null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis); MappedFieldType ft = createDefaultFieldType(); ft.setName("field"); String date1 = "2015-10-12T14:10:55"; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java index da822349909..04f5dcb5b91 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/RangeFieldTypeTests.java @@ -74,7 +74,7 @@ public class RangeFieldTypeTests extends FieldTypeTestCase { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT).build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); - QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), null, + QueryShardContext context = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis); RangeFieldMapper.RangeFieldType ft = new 
RangeFieldMapper.RangeFieldType(type); ft.setName(FIELDNAME); diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 13854ffc1e2..ab73551d3e1 100644 --- a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; import org.hamcrest.Matchers; diff --git a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index 9c021e249a8..6b9fbe76ba8 100644 --- a/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -22,13 +22,13 @@ import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.SearchModule; @@ -62,19 +62,24 @@ public class InnerHitBuilderTests extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; - private static IndicesQueriesRegistry indicesQueriesRegistry; + private static NamedXContentRegistry xContentRegistry; @BeforeClass public static void init() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); - indicesQueriesRegistry = searchModule.getQueryParserRegistry(); + xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); } @AfterClass public static void afterClass() throws Exception { namedWriteableRegistry = null; - indicesQueriesRegistry = null; + xContentRegistry = null; + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return xContentRegistry; } public void testSerialization() throws Exception { @@ -98,7 +103,7 @@ public class InnerHitBuilderTests extends ESTestCase { } XContentParser parser = createParser(shuffled); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY); InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(context); assertThat(innerHit, not(sameInstance(secondInnerHits))); assertThat(innerHit, 
equalTo(secondInnerHits)); diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java index da76c2796f6..1e00176f558 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java @@ -24,9 +24,9 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -38,12 +38,11 @@ import java.io.IOException; import static java.util.Collections.emptyList; public class QueryParseContextTests extends ESTestCase { - - private static IndicesQueriesRegistry indicesQueriesRegistry; + private static NamedXContentRegistry xContentRegistry; @BeforeClass public static void init() { - indicesQueriesRegistry = new SearchModule(Settings.EMPTY, false, emptyList()).getQueryParserRegistry(); + xContentRegistry = new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents()); } private ThreadContext threadContext; @@ -64,7 +63,7 @@ public class QueryParseContextTests extends ESTestCase { QueryBuilder query = new MatchQueryBuilder("foo", "bar"); String requestBody = "{ \"query\" : " + query.toString() + "}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, requestBody)) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); QueryBuilder actual = context.parseTopLevelQueryBuilder(); assertEquals(query, actual); } @@ -73,7 +72,7 @@ public class QueryParseContextTests extends ESTestCase { public void testParseTopLevelBuilderEmptyObject() throws IOException { String requestBody = "{}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, requestBody)) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); QueryBuilder query = context.parseTopLevelQueryBuilder(); assertNull(query); } @@ -82,7 +81,7 @@ public class QueryParseContextTests extends ESTestCase { public void testParseTopLevelBuilderUnknownParameter() throws IOException { String requestBody = "{ \"foo\" : \"bar\"}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, requestBody)) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); ParsingException exception = expectThrows(ParsingException.class, () -> context.parseTopLevelQueryBuilder()); assertEquals("request does not support [foo]", exception.getMessage()); } @@ -92,7 +91,7 @@ public class QueryParseContextTests extends ESTestCase { QueryBuilder query = new MatchQueryBuilder("foo", "bar"); String source = query.toString(); try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, source)) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); QueryBuilder actual = context.parseInnerQueryBuilder(); assertEquals(query, actual); } @@ -103,30 +102,35 @@ public class QueryParseContextTests extends ESTestCase { try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { parser.nextToken(); parser.nextToken(); // don't start with START_OBJECT to provoke exception - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder()); assertEquals("[_na] query malformed, must start with start_object", exception.getMessage()); } source = "{}"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY); IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> context.parseInnerQueryBuilder()); assertEquals("query malformed, empty clause found at [1:2]", exception.getMessage()); } source = "{ \"foo\" : \"bar\" }"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder()); assertEquals("[foo] query malformed, no start_object after query name", exception.getMessage()); } source = "{ \"foo\" : {} }"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); ParsingException exception = expectThrows(ParsingException.class, () -> context.parseInnerQueryBuilder()); assertEquals("no [query] registered for [foo]", exception.getMessage()); } } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return xContentRegistry; + } } diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java index ccf514dd41c..434c3d1ebf3 100644 --- a/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/QueryShardContextTests.java @@ -48,7 +48,7 @@ public class QueryShardContextTests extends ESTestCase { when(mapperService.getIndexSettings()).thenReturn(indexSettings); final long nowInMillis = randomPositiveLong(); QueryShardContext context = new QueryShardContext( - 0, indexSettings, null, null, mapperService, null, null, xContentRegistry(), null, null, null, + 0, indexSettings, null, null, mapperService, null, null, xContentRegistry(), null, null, () -> nowInMillis); context.setAllowUnmappedFields(false); diff --git a/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java 
b/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java index 607f3f924d3..d7ef534bcb2 100644 --- a/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/RangeQueryRewriteTests.java @@ -37,7 +37,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { IndexService indexService = createIndex("test"); IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(), - null, null, xContentRegistry(), null, null, reader, null); + null, null, xContentRegistry(), null, reader, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); assertEquals(Relation.DISJOINT, range.getRelation(context)); } @@ -54,7 +54,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(), - null, null, xContentRegistry(), null, null, null, null); + null, null, xContentRegistry(), null, null, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // can't make assumptions on a missing reader, so it must return INTERSECT assertEquals(Relation.INTERSECTS, range.getRelation(context)); @@ -73,7 +73,7 @@ public class RangeQueryRewriteTests extends ESSingleNodeTestCase { new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE, false); IndexReader reader = new MultiReader(); QueryRewriteContext context = new QueryShardContext(0, indexService.getIndexSettings(), null, null, indexService.mapperService(), - null, null, xContentRegistry(), null, null, reader, null); + null, null, xContentRegistry(), null, reader, null); RangeQueryBuilder range = new RangeQueryBuilder("foo"); // no values -> DISJOINT assertEquals(Relation.DISJOINT, range.getRelation(context)); diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java index 2d7809b7df6..3c9b0f29d75 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryParserTests.java @@ -34,37 +34,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MockFieldMapper; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; -import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static java.util.Collections.emptyList; import static org.hamcrest.Matchers.equalTo; public class SimpleQueryParserTests extends ESTestCase { - - private static IndicesQueriesRegistry indicesQueriesRegistry; - - /** - * setup for the whole base test class - */ - @BeforeClass - public static void init() { - SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); - indicesQueriesRegistry = searchModule.getQueryParserRegistry(); - } - - @AfterClass - public static void afterClass() throws Exception { - indicesQueriesRegistry = null; - } - private static class MockSimpleQueryParser 
extends SimpleQueryParser { public MockSimpleQueryParser(Analyzer analyzer, Map weights, int flags, Settings settings) { super(analyzer, weights, flags, settings, null); @@ -148,7 +126,7 @@ public class SimpleQueryParserTests extends ESTestCase { IndexMetaData indexState = IndexMetaData.builder("index").settings(indexSettings).build(); IndexSettings settings = new IndexSettings(indexState, Settings.EMPTY); QueryShardContext mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, xContentRegistry(), - indicesQueriesRegistry, null, null, System::currentTimeMillis) { + null, null, System::currentTimeMillis) { @Override public MappedFieldType fieldMapper(String name) { return new MockFieldMapper.FakeFieldType(); @@ -161,7 +139,7 @@ public class SimpleQueryParserTests extends ESTestCase { assertEquals(new TermQuery(new Term("foo.quote", "bar")), parser.parse("\"bar\"")); // Now check what happens if foo.quote does not exist - mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, xContentRegistry(), indicesQueriesRegistry, + mockShardContext = new QueryShardContext(0, settings, null, null, null, null, null, xContentRegistry(), null, null, System::currentTimeMillis) { @Override public MappedFieldType fieldMapper(String name) { diff --git a/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java b/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java index cf290b2b1b6..310ad86eb11 100644 --- a/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/AbstractSearchTestCase.java @@ -25,10 +25,10 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -54,7 +54,7 @@ public abstract class AbstractSearchTestCase extends ESTestCase { protected NamedWriteableRegistry namedWriteableRegistry; protected SearchRequestParsers searchRequestParsers; private TestSearchExtPlugin searchExtPlugin; - protected IndicesQueriesRegistry queriesRegistry; + private NamedXContentRegistry xContentRegistry; public void setUp() throws Exception { super.setUp(); @@ -65,8 +65,13 @@ public abstract class AbstractSearchTestCase extends ESTestCase { entries.addAll(indicesModule.getNamedWriteables()); entries.addAll(searchModule.getNamedWriteables()); namedWriteableRegistry = new NamedWriteableRegistry(entries); + xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); searchRequestParsers = searchModule.getSearchRequestParsers(); - queriesRegistry = searchModule.getQueryParserRegistry(); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return xContentRegistry; } protected SearchSourceBuilder createSearchSourceBuilder() { diff --git a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java index 2e9644191f6..89e846bdb8b 100644 --- 
a/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java +++ b/core/src/test/java/org/elasticsearch/search/SearchModuleTests.java @@ -19,20 +19,16 @@ package org.elasticsearch.search; import org.elasticsearch.common.ParseFieldMatcher; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.inject.ModuleTestCase; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.query.functionscore.GaussDecayFunctionBuilder; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -70,16 +66,17 @@ import org.elasticsearch.search.suggest.term.TermSuggester; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; +import java.util.Set; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static java.util.Collections.singletonMap; +import static java.util.stream.Collectors.toSet; import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.notNullValue; public class SearchModuleTests extends ModuleTestCase { @@ -109,8 +106,8 @@ public class SearchModuleTests extends ModuleTestCase { GaussDecayFunctionBuilder.PARSER)); } }; - SearchModule searchModule = new SearchModule(Settings.EMPTY, false, singletonList(registersDupeScoreFunction)); - expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry(searchModule.getNamedXContents())); + expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry( + new SearchModule(Settings.EMPTY, false, singletonList(registersDupeScoreFunction)).getNamedXContents())); SearchPlugin registersDupeSignificanceHeuristic = new SearchPlugin() { @Override @@ -144,8 +141,8 @@ public class SearchModuleTests extends ModuleTestCase { return singletonList(new QuerySpec<>(TermQueryBuilder.NAME, TermQueryBuilder::new, TermQueryBuilder::fromXContent)); } }; - expectThrows(IllegalArgumentException.class, () -> new SearchModule(Settings.EMPTY, false, - singletonList(registersDupeQuery))); + expectThrows(IllegalArgumentException.class, () -> new NamedXContentRegistry( + new SearchModule(Settings.EMPTY, false, singletonList(registersDupeQuery)).getNamedXContents())); SearchPlugin registersDupeAggregation = new SearchPlugin() { public List getAggregations() { @@ -200,31 +197,22 @@ public class SearchModuleTests extends ModuleTestCase { } public void testRegisteredQueries() throws IOException { - SearchModule module = new SearchModule(Settings.EMPTY, false, emptyList()); List allSupportedQueries = new ArrayList<>(); Collections.addAll(allSupportedQueries, NON_DEPRECATED_QUERIES); 
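Illustrative sketch (hypothetical, not part of this patch): with IndicesQueriesRegistry removed, query parsers are resolved through the NamedXContentRegistry attached to the XContentParser. A test supplies its parsers by overriding ESTestCase#xContentRegistry() and builds the slimmed-down QueryParseContext from the parser alone, as the tests changed in this patch do. The class below is a made-up example that only uses API appearing elsewhere in this diff.

// Hypothetical test, sketched to show the registry wiring used throughout this patch.
import static java.util.Collections.singletonList;

import java.io.IOException;

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.test.ESTestCase;

public class QueryRegistryWiringSketchTests extends ESTestCase {

    @Override
    protected NamedXContentRegistry xContentRegistry() {
        // Register only the match_all parser here; a full setup would use
        // new NamedXContentRegistry(searchModule.getNamedXContents()) as other tests in this patch do.
        return new NamedXContentRegistry(singletonList(new NamedXContentRegistry.Entry(QueryBuilder.class,
                new ParseField(MatchAllQueryBuilder.NAME), (p, c) -> MatchAllQueryBuilder.fromXContent((QueryParseContext) c))));
    }

    public void testParseMatchAll() throws IOException {
        try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"match_all\" : {} }")) {
            // No registry argument anymore: the parser carries the NamedXContentRegistry.
            QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT);
            QueryBuilder parsed = context.parseTopLevelQueryBuilder();
            assertEquals(new MatchAllQueryBuilder(), parsed);
        }
    }
}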
Collections.addAll(allSupportedQueries, DEPRECATED_QUERIES); - String[] supportedQueries = allSupportedQueries.toArray(new String[allSupportedQueries.size()]); - assertThat(module.getQueryParserRegistry().getNames(), containsInAnyOrder(supportedQueries)); + SearchModule module = new SearchModule(Settings.EMPTY, false, emptyList()); - IndicesQueriesRegistry indicesQueriesRegistry = module.getQueryParserRegistry(); - XContentParser dummyParser = createParser(JsonXContent.jsonXContent, new BytesArray("{}")); - for (String queryName : supportedQueries) { - indicesQueriesRegistry.lookup(queryName, ParseFieldMatcher.EMPTY, dummyParser.getTokenLocation()); - } + Set registeredNonDeprecated = module.getNamedXContents().stream() + .filter(e -> e.categoryClass.equals(QueryBuilder.class)) + .map(e -> e.name.getPreferredName()) + .collect(toSet()); + Set registeredAll = module.getNamedXContents().stream() + .filter(e -> e.categoryClass.equals(QueryBuilder.class)) + .flatMap(e -> Arrays.stream(e.name.getAllNamesIncludedDeprecated())) + .collect(toSet()); - for (String queryName : NON_DEPRECATED_QUERIES) { - QueryParser queryParser = indicesQueriesRegistry.lookup(queryName, ParseFieldMatcher.STRICT, dummyParser.getTokenLocation()); - assertThat(queryParser, notNullValue()); - } - for (String queryName : DEPRECATED_QUERIES) { - try { - indicesQueriesRegistry.lookup(queryName, ParseFieldMatcher.STRICT, dummyParser.getTokenLocation()); - fail("query is deprecated, getQueryParser should have failed in strict mode"); - } catch(IllegalArgumentException e) { - assertThat(e.getMessage(), containsString("Deprecated field [" + queryName + "] used")); - } - } + assertThat(registeredNonDeprecated, containsInAnyOrder(NON_DEPRECATED_QUERIES)); + assertThat(registeredAll, containsInAnyOrder(allSupportedQueries.toArray(new String[0]))); } public void testRegisterAggregation() { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java index c81ec476b86..0e79655d828 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchRequestParsers; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.ESSingleNodeTestCase; @@ -61,9 +60,8 @@ public class AggregationCollectorTests extends ESSingleNodeTestCase { private boolean needsScores(IndexService index, String agg) throws IOException { AggregatorParsers parser = getInstanceFromNode(SearchRequestParsers.class).aggParsers; - IndicesQueriesRegistry queriesRegistry = getInstanceFromNode(IndicesQueriesRegistry.class); XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg); - QueryParseContext parseContext = new QueryParseContext(queriesRegistry, aggParser, ParseFieldMatcher.STRICT); + QueryParseContext parseContext = new QueryParseContext(aggParser, ParseFieldMatcher.STRICT); aggParser.nextToken(); SearchContext context = createSearchContext(index); final AggregatorFactories factories = 
parser.parseAggregators(parseContext).build(context, null); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java index 26b3dd6f8f2..5d6222e6979 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorParsingTests.java @@ -22,16 +22,16 @@ package org.elasticsearch.search.aggregations; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; -import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.ESTestCase; import java.util.Random; @@ -50,7 +50,7 @@ public class AggregatorParsingTests extends ESTestCase { } protected AggregatorParsers aggParsers; - protected IndicesQueriesRegistry queriesRegistry; + private NamedXContentRegistry xContentRegistry; protected ParseFieldMatcher parseFieldMatcher; /** @@ -73,7 +73,7 @@ public class AggregatorParsingTests extends ESTestCase { String type = randomAsciiOfLengthBetween(1, 10); currentTypes[i] = type; } - queriesRegistry = searchModule.getQueryParserRegistry(); + xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); parseFieldMatcher = ParseFieldMatcher.STRICT; } @@ -95,7 +95,7 @@ public class AggregatorParsingTests extends ESTestCase { .endObject(); try { XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); @@ -132,7 +132,7 @@ public class AggregatorParsingTests extends ESTestCase { .endObject(); try { XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); @@ -171,7 +171,7 @@ public class AggregatorParsingTests extends ESTestCase { .endObject(); try { XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); @@ -199,7 +199,7 @@ public class AggregatorParsingTests extends ESTestCase { .endObject(); try { XContentParser parser = createParser(source); - QueryParseContext parseContext = new 
QueryParseContext(queriesRegistry, parser, parseFieldMatcher); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); @@ -228,7 +228,7 @@ public class AggregatorParsingTests extends ESTestCase { .endObject(); try { XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); @@ -257,7 +257,7 @@ public class AggregatorParsingTests extends ESTestCase { .endObject(); try { XContentParser parser = createParser(source); - QueryParseContext parseContext = new QueryParseContext(queriesRegistry, parser, parseFieldMatcher); + QueryParseContext parseContext = new QueryParseContext(parser, parseFieldMatcher); assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); aggParsers.parseAggregators(parseContext); fail(); @@ -265,4 +265,9 @@ public class AggregatorParsingTests extends ESTestCase { // All Good } } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return xContentRegistry; + } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index e8073bac354..a39a17068f5 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -33,7 +34,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.env.Environment; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.indices.IndicesModule; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractQueryTestCase; import org.elasticsearch.test.ESTestCase; @@ -64,7 +64,7 @@ public abstract class BaseAggregationTestCase now); + null, xContentRegistry(), null, null, () -> now); when(context.getQueryShardContext()).thenReturn(qsc); FormatDateTimeFormatter formatter = Joda.forPattern("dateOptionalTime"); DocValueFormat format = new DocValueFormat.DateTime(formatter, DateTimeZone.UTC); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 655a03a75c0..0db3fd6aa3f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -35,7 +35,6 @@ import 
org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.SearchShardTarget; @@ -267,11 +266,10 @@ public class SignificanceHeuristicTests extends ESTestCase { protected void checkParseException(ParseFieldRegistry significanceHeuristicParserRegistry, SearchContext searchContext, String faultyHeuristicDefinition, String expectedError) throws IOException { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); try { XContentParser stParser = createParser(JsonXContent.jsonXContent, "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); - QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT); + QueryParseContext parseContext = new QueryParseContext(stParser, ParseFieldMatcher.STRICT); stParser.nextToken(); SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", parseContext); fail(); @@ -293,8 +291,7 @@ public class SignificanceHeuristicTests extends ESTestCase { private SignificanceHeuristic parseSignificanceHeuristic( ParseFieldRegistry significanceHeuristicParserRegistry, SearchContext searchContext, XContentParser stParser) throws IOException { - IndicesQueriesRegistry registry = new IndicesQueriesRegistry(); - QueryParseContext parseContext = new QueryParseContext(registry, stParser, ParseFieldMatcher.STRICT); + QueryParseContext parseContext = new QueryParseContext(stParser, ParseFieldMatcher.STRICT); stParser.nextToken(); SignificantTermsAggregationBuilder aggregatorFactory = (SignificantTermsAggregationBuilder) SignificantTermsAggregationBuilder.getParser( diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java index 496e77f364c..bf8e7421747 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/FiltersTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.index.query.MatchNoneQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.aggregations.BaseAggregationTestCase; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.filters.FiltersAggregator.KeyedFilter; @@ -89,8 +88,7 @@ public class FiltersTests extends BaseAggregationTestCase T expectParseThrows(Class exceptionClass, String highlightElement) throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); return expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(context)); } @@ -266,7 +266,7 @@ public class HighlightBuilderTests extends ESTestCase { IndexSettings idxSettings = 
IndexSettingsModule.newIndexSettings(index, indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in highlighter QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), - indicesQueriesRegistry, null, null, System::currentTimeMillis) { + null, null, System::currentTimeMillis) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); @@ -380,7 +380,7 @@ public class HighlightBuilderTests extends ESTestCase { "}\n"; XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY); HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context); assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, highlightBuilder.preTags()); @@ -392,7 +392,7 @@ public class HighlightBuilderTests extends ESTestCase { "}\n"; parser = createParser(JsonXContent.jsonXContent, highlightElement); - context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY); highlightBuilder = HighlightBuilder.fromXContent(context); assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, highlightBuilder.preTags()); @@ -413,21 +413,21 @@ public class HighlightBuilderTests extends ESTestCase { String highlightElement = "{ }"; XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY); HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(context); assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); highlightElement = "{ \"fields\" : { } }"; parser = createParser(JsonXContent.jsonXContent, highlightElement); - context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY); highlightBuilder = HighlightBuilder.fromXContent(context); assertEquals("defining no field should return plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); highlightElement = "{ \"fields\" : { \"foo\" : { } } }"; parser = createParser(JsonXContent.jsonXContent, highlightElement); - context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.EMPTY); + context = new QueryParseContext(parser, ParseFieldMatcher.EMPTY); highlightBuilder = HighlightBuilder.fromXContent(context); assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); } @@ -715,4 +715,9 @@ public class HighlightBuilderTests extends ESTestCase { private static HighlightBuilder serializedCopy(HighlightBuilder original) throws IOException { return ESTestCase.copyWriteable(original, namedWriteableRegistry, HighlightBuilder::new); } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return xContentRegistry; + } } diff --git a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java 
b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java index 76474422961..1ac267330d7 100644 --- a/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java +++ b/core/src/test/java/org/elasticsearch/search/internal/ShardSearchTransportRequestTests.java @@ -164,7 +164,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { public QueryBuilder aliasFilter(IndexMetaData indexMetaData, String... aliasNames) { ShardSearchRequest.FilterParser filterParser = bytes -> { try (XContentParser parser = XContentFactory.xContent(bytes).createParser(xContentRegistry(), bytes)) { - return new QueryParseContext(queriesRegistry, parser, new ParseFieldMatcher(Settings.EMPTY)).parseInnerQueryBuilder(); + return new QueryParseContext(parser, new ParseFieldMatcher(Settings.EMPTY)).parseInnerQueryBuilder(); } }; return ShardSearchRequest.parseAliasFilter(filterParser, indexMetaData, aliasNames); @@ -200,7 +200,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase { IndexSettings indexSettings = new IndexSettings(indexMetadata.build(), Settings.EMPTY); final long nowInMillis = randomPositiveLong(); QueryShardContext context = new QueryShardContext( - 0, indexSettings, null, null, null, null, null, xContentRegistry(), queriesRegistry, null, null, () -> nowInMillis); + 0, indexSettings, null, null, null, null, null, xContentRegistry(), null, null, () -> nowInMillis); readRequest.rewrite(context); QueryBuilder queryBuilder = readRequest.filteringAliases(); assertEquals(queryBuilder, QueryBuilders.boolQuery() diff --git a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java index 160e5393391..3e051c9828b 100644 --- a/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/rescore/QueryRescoreBuilderTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -42,7 +43,6 @@ import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.rescore.QueryRescorer.QueryRescoreContext; import org.elasticsearch.test.ESTestCase; @@ -59,7 +59,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; - private static IndicesQueriesRegistry indicesQueriesRegistry; + private static NamedXContentRegistry xContentRegistry; /** * setup for the whole base test class @@ -68,13 +68,13 @@ public class QueryRescoreBuilderTests extends ESTestCase { public static void init() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); namedWriteableRegistry = new 
NamedWriteableRegistry(searchModule.getNamedWriteables()); - indicesQueriesRegistry = searchModule.getQueryParserRegistry(); + xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); } @AfterClass public static void afterClass() throws Exception { namedWriteableRegistry = null; - indicesQueriesRegistry = null; + xContentRegistry = null; } /** @@ -119,7 +119,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { XContentParser parser = createParser(shuffled); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); parser.nextToken(); RescoreBuilder secondRescoreBuilder = RescoreBuilder.parseFromXContent(context); assertNotSame(rescoreBuilder, secondRescoreBuilder); @@ -139,7 +139,7 @@ public class QueryRescoreBuilderTests extends ESTestCase { IndexSettings idxSettings = IndexSettingsModule.newIndexSettings(randomAsciiOfLengthBetween(1, 10), indexSettings); // shard context will only need indicesQueriesRegistry for building Query objects nested in query rescorer QueryShardContext mockShardContext = new QueryShardContext(0, idxSettings, null, null, null, null, null, xContentRegistry(), - indicesQueriesRegistry, null, null, () -> nowInMillis) { + null, null, () -> nowInMillis) { @Override public MappedFieldType fieldMapper(String name) { TextFieldMapper.Builder builder = new TextFieldMapper.Builder(name); @@ -252,12 +252,17 @@ public class QueryRescoreBuilderTests extends ESTestCase { */ private QueryParseContext createContext(String rescoreElement) throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, rescoreElement); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); // move to first token, this is where the internal fromXContent assertTrue(parser.nextToken() == XContentParser.Token.START_OBJECT); return context; } + @Override + protected NamedXContentRegistry xContentRegistry() { + return xContentRegistry; + } + private static RescoreBuilder mutate(RescoreBuilder original) throws IOException { RescoreBuilder mutation = ESTestCase.copyWriteable(original, namedWriteableRegistry, QueryRescorerBuilder::new); if (randomBoolean()) { diff --git a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index beb69a0184b..93663d93683 100644 --- a/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -28,14 +28,9 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; import java.util.Collections; @@ -44,22 +39,6 @@ import static 
org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashC public class SearchAfterBuilderTests extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; - private static IndicesQueriesRegistry indicesQueriesRegistry; - - /** - * setup for the whole base test class - */ - @BeforeClass - public static void init() { - indicesQueriesRegistry = new IndicesQueriesRegistry(); - QueryParser parser = MatchAllQueryBuilder::fromXContent; - indicesQueriesRegistry.register(parser, MatchAllQueryBuilder.NAME); - } - - @AfterClass - public static void afterClass() throws Exception { - indicesQueriesRegistry = null; - } private static SearchAfterBuilder randomSearchAfterBuilder() throws IOException { int numSearchFrom = randomIntBetween(1, 10); @@ -189,7 +168,7 @@ public class SearchAfterBuilderTests extends ESTestCase { searchAfterBuilder.innerToXContent(builder); builder.endObject(); XContentParser parser = createParser(shuffleXContent(builder)); - new QueryParseContext(indicesQueriesRegistry, parser, ParseFieldMatcher.STRICT); + new QueryParseContext(parser, ParseFieldMatcher.STRICT); parser.nextToken(); parser.nextToken(); parser.nextToken(); diff --git a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index 52bc3bd4633..a51fc8d412f 100644 --- a/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -39,14 +39,9 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.fielddata.IndexNumericFieldData; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.UidFieldMapper; -import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.test.ESTestCase; -import org.junit.AfterClass; -import org.junit.BeforeClass; import java.io.IOException; import java.util.ArrayList; @@ -66,22 +61,6 @@ import static org.mockito.Mockito.when; public class SliceBuilderTests extends ESTestCase { private static final int MAX_SLICE = 20; - private static IndicesQueriesRegistry indicesQueriesRegistry; - - /** - * setup for the whole base test class - */ - @BeforeClass - public static void init() { - indicesQueriesRegistry = new IndicesQueriesRegistry(); - QueryParser parser = MatchAllQueryBuilder::fromXContent; - indicesQueriesRegistry.register(parser, MatchAllQueryBuilder.NAME); - } - - @AfterClass - public static void afterClass() throws Exception { - indicesQueriesRegistry = null; - } private static SliceBuilder randomSliceBuilder() throws IOException { int max = randomIntBetween(2, MAX_SLICE); @@ -125,8 +104,7 @@ public class SliceBuilderTests extends ESTestCase { sliceBuilder.innerToXContent(builder); builder.endObject(); XContentParser parser = createParser(shuffleXContent(builder)); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, parser, - ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(context); assertNotSame(sliceBuilder, secondSliceBuilder); assertEquals(sliceBuilder, secondSliceBuilder); diff --git 
a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index 63e65a91062..683d5d78c9e 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -50,7 +51,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.script.CompiledScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; @@ -77,11 +77,11 @@ import static java.util.Collections.emptyList; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; public abstract class AbstractSortTestCase> extends ESTestCase { + private static final int NUMBER_OF_TESTBUILDERS = 20; protected static NamedWriteableRegistry namedWriteableRegistry; - private static final int NUMBER_OF_TESTBUILDERS = 20; - static IndicesQueriesRegistry indicesQueriesRegistry; + private static NamedXContentRegistry xContentRegistry; private static ScriptService scriptService; @BeforeClass @@ -105,13 +105,13 @@ public abstract class AbstractSortTestCase> extends EST SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); - indicesQueriesRegistry = searchModule.getQueryParserRegistry(); + xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); } @AfterClass public static void afterClass() throws Exception { namedWriteableRegistry = null; - indicesQueriesRegistry = null; + xContentRegistry = null; } /** Returns random sort that is put under test */ @@ -146,7 +146,7 @@ public abstract class AbstractSortTestCase> extends EST String elementName = itemParser.currentName(); itemParser.nextToken(); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(itemParser, ParseFieldMatcher.STRICT); T parsedItem = fromXContent(context, elementName); assertNotSame(testItem, parsedItem); assertEquals(testItem, parsedItem); @@ -210,7 +210,7 @@ public abstract class AbstractSortTestCase> extends EST }); long nowInMillis = randomPositiveLong(); return new QueryShardContext(0, idxSettings, bitsetFilterCache, ifds, null, null, scriptService, - xContentRegistry(), indicesQueriesRegistry, null, null, () -> nowInMillis) { + xContentRegistry(), null, null, () -> nowInMillis) { @Override public MappedFieldType fieldMapper(String name) { return provideMappedFieldType(name); @@ -235,6 +235,11 @@ public abstract class AbstractSortTestCase> extends EST return doubleFieldType; } + @Override + protected NamedXContentRegistry xContentRegistry() { + return 
xContentRegistry; + } + protected static QueryBuilder randomNestedFilter() { int id = randomIntBetween(0, 2); switch(id) { diff --git a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 60b9f14c368..bb9761c0f26 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -134,7 +134,7 @@ public class FieldSortBuilderTests extends AbstractSortTestCase GeoDistanceSortBuilder.fromXContent(context, "")); assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage()); @@ -335,7 +334,7 @@ public class GeoDistanceSortBuilderTests extends AbstractSortTestCase ScriptSortBuilder.fromXContent(context, null)); assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage()); } @@ -231,7 +231,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase ScriptSortBuilder.fromXContent(context, null)); assertEquals("[_script] unknown field [bad_field], parser not found", e.getMessage()); } @@ -243,7 +243,7 @@ public class ScriptSortBuilderTests extends AbstractSortTestCase ScriptSortBuilder.fromXContent(context, null)); assertEquals("[_script] script doesn't support values of type: START_ARRAY", e.getMessage()); } diff --git a/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java index d31dee0b3d2..b521060f27e 100644 --- a/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java @@ -21,15 +21,14 @@ package org.elasticsearch.search.sort; import org.elasticsearch.common.ParseFieldMatcher; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; @@ -43,24 +42,19 @@ import java.util.List; import static java.util.Collections.emptyList; public class SortBuilderTests extends ESTestCase { - private static final int NUMBER_OF_RUNS = 20; - protected static NamedWriteableRegistry namedWriteableRegistry; - - static IndicesQueriesRegistry indicesQueriesRegistry; + private static NamedXContentRegistry xContentRegistry; @BeforeClass public static void init() { SearchModule searchModule = new SearchModule(Settings.EMPTY, false, emptyList()); - namedWriteableRegistry = new NamedWriteableRegistry(searchModule.getNamedWriteables()); - indicesQueriesRegistry = searchModule.getQueryParserRegistry(); + xContentRegistry = new NamedXContentRegistry(searchModule.getNamedXContents()); } @AfterClass public static void afterClass() throws Exception { - namedWriteableRegistry = null; - indicesQueriesRegistry = null; + xContentRegistry = null; } /** @@ -237,9 +231,14 @@ public class SortBuilderTests 
extends ESTestCase { assertEquals(new ScoreSortBuilder(), result.get(5)); } + @Override + protected NamedXContentRegistry xContentRegistry() { + return xContentRegistry; + } + private List> parseSort(String jsonString) throws IOException { XContentParser itemParser = createParser(JsonXContent.jsonXContent, jsonString); - QueryParseContext context = new QueryParseContext(indicesQueriesRegistry, itemParser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(itemParser, ParseFieldMatcher.STRICT); assertEquals(XContentParser.Token.START_OBJECT, itemParser.nextToken()); assertEquals(XContentParser.Token.FIELD_NAME, itemParser.nextToken()); diff --git a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index 0f93e6a36e5..f75b73a5138 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -29,7 +29,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; @@ -44,7 +43,6 @@ public abstract class AbstractSuggestionBuilderTestCase extends ESTest toXContent.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentParser parser = createParser(builder); parser.nextToken(); - QC fromXContext = fromXContent(new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT)); + QC fromXContext = fromXContent(new QueryParseContext(parser, ParseFieldMatcher.STRICT)); assertEquals(toXContent, fromXContext); assertEquals(toXContent.hashCode(), fromXContext.hashCode()); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index 27ec964aa4b..eab93d9fbdf 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.suggest.phrase.PhraseSuggestionContext.DirectCandidateGenerator; import org.elasticsearch.test.ESTestCase; @@ -42,13 +41,9 @@ import java.util.function.Supplier; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; -public class DirectCandidateGeneratorTests extends ESTestCase{ - - private static final IndicesQueriesRegistry mockRegistry = new IndicesQueriesRegistry(); +public class DirectCandidateGeneratorTests extends ESTestCase { private static final int NUMBER_OF_RUNS = 20; - - /** * Test serialization and deserialization of the generator */ @@ -113,7 +108,7 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ } generator.toXContent(builder, 
ToXContent.EMPTY_PARAMS); XContentParser parser = createParser(shuffleXContent(builder)); - QueryParseContext context = new QueryParseContext(mockRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); parser.nextToken(); DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.fromXContent(context); assertNotSame(generator, secondGenerator); @@ -177,7 +172,7 @@ public class DirectCandidateGeneratorTests extends ESTestCase{ private void assertIllegalXContent(String directGenerator, Class exceptionClass, String exceptionMsg) throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator); - QueryParseContext context = new QueryParseContext(mockRegistry, parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.fromXContent(context)); assertEquals(exceptionMsg, e.getMessage()); } diff --git a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index 044982540a3..b8289c70062 100644 --- a/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryParseContext; -import org.elasticsearch.indices.query.IndicesQueriesRegistry; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; import org.junit.AfterClass; @@ -99,7 +98,7 @@ public abstract class SmoothingModelTestCase extends ESTestCase { testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); XContentParser parser = createParser(shuffleXContent(contentBuilder)); - QueryParseContext context = new QueryParseContext(new IndicesQueriesRegistry(), parser, ParseFieldMatcher.STRICT); + QueryParseContext context = new QueryParseContext(parser, ParseFieldMatcher.STRICT); parser.nextToken(); // go to start token, real parsing would do that in the outer element parser SmoothingModel parsedModel = fromXContent(context); assertNotSame(testModel, parsedModel); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 0342747b6b9..2ad36f3d1dc 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -88,7 +88,7 @@ public class TransportSearchTemplateAction extends HandledTransportAction listener) { SearchRequest searchRequest; try { - searchRequest = createSearchRequest(request, docSource, searchRequestParsers.queryParsers, + searchRequest = createSearchRequest(request, docSource, searchRequestParsers.aggParsers, searchRequestParsers.searchExtParsers, xContentRegistry, parseFieldMatcher); } catch (IOException e) { listener.onFailure(e); @@ -131,7 +130,7 @@ public class TransportPercolateAction extends 
HandledTransportAction> * @return a new {@link QueryParseContext} based on the base test index and queryParserService */ protected static QueryParseContext createParseContext(XContentParser parser, ParseFieldMatcher matcher) { - return new QueryParseContext(serviceHolder.indicesQueriesRegistry, parser, matcher); + return new QueryParseContext(parser, matcher); } /** @@ -1013,8 +1012,6 @@ public abstract class AbstractQueryTestCase> } private static class ServiceHolder implements Closeable { - - private final IndicesQueriesRegistry indicesQueriesRegistry; private final IndexFieldDataService indexFieldDataService; private final SearchModule searchModule; private final NamedWriteableRegistry namedWriteableRegistry; @@ -1075,7 +1072,6 @@ public abstract class AbstractQueryTestCase> } }); - indicesQueriesRegistry = searchModule.getQueryParserRegistry(); for (String type : currentTypes) { mapperService.merge(type, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(type, @@ -1106,7 +1102,7 @@ public abstract class AbstractQueryTestCase> QueryShardContext createShardContext() { return new QueryShardContext(0, idxSettings, bitsetFilterCache, indexFieldDataService, mapperService, similarityService, - scriptService, xContentRegistry, indicesQueriesRegistry, this.client, null, () -> nowInMillis); + scriptService, xContentRegistry, this.client, null, () -> nowInMillis); } ScriptModule createScriptModule(List scriptPlugins) { diff --git a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java index b000c42d41a..2542e1c374b 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java @@ -35,7 +35,7 @@ public class MockSearchServiceTests extends ESTestCase { public void testAssertNoInFlightContext() { final long nowInMillis = randomPositiveLong(); SearchContext s = new TestSearchContext(new QueryShardContext(0, new IndexSettings(IndexMetaData.PROTO, Settings.EMPTY), null, null, - null, null, null, xContentRegistry(), null, null, null, () -> nowInMillis)) { + null, null, null, xContentRegistry(), null, null, () -> nowInMillis)) { @Override public SearchShardTarget shardTarget() { return new SearchShardTarget("node", new Index("idx", "ignored"), 0); From 0e9186e1373b99e261d80317c836477b7cc31d66 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 21 Dec 2016 15:09:58 +0100 Subject: [PATCH 14/21] Simplify Unicast Zen Ping (#22277) The `UnicastZenPing` shows its age and is the result of many small changes. The current state of affairs is confusing and is hard to reason about. This PR cleans it up (while following the same original intentions). Highlights of the changes are: 1) Clear 3-round flow - no interleaving of scheduling. 2) The previous implementation did a best-effort attempt to wait for ongoing pings to be sent and completed. The pings were guaranteed to complete because each used the total ping duration as a timeout. This did make it hard to reason about the total ping duration and the flow of the code. All of this is removed now and ping should just complete within the given duration or not be counted (note that it was very handy for testing, but I move the needed sync logic to the test). 3) Because of (2) the pinging scheduling changed a bit, to give a chance for the last round to complete. We now ping at the beginning, 1/3 and 2/3 of the duration.
4) To offset for (3) a bit, incoming ping requests are now added to on going ping collections. 5) UnicastZenPing never establishes full blown connections (but does reuse them if there). Relates to #22120 6) Discovery host providers are only used once per pinging round. Closes #21739 7) Usage of the ability to open a connection without connecting to a node ( #22194 ) and shorter connection timeouts helps with connections piling up. Closes #19370 8) Beefed up testing and sped them up. 9) removed light profile from production code --- .../TransportClientNodesService.java | 21 +- .../discovery/zen/ElectMasterService.java | 3 +- .../discovery/zen/UnicastZenPing.java | 530 +++++++++--------- .../discovery/zen/ZenDiscovery.java | 24 +- .../elasticsearch/discovery/zen/ZenPing.java | 21 +- .../transport/ConnectionProfile.java | 28 +- .../elasticsearch/transport/Transport.java | 3 +- .../transport/TransportService.java | 27 - .../discovery/zen/UnicastZenPingTests.java | 446 ++++++++++++--- .../discovery/zen/ZenPingTests.java | 2 +- .../transport/TCPTransportTests.java | 4 +- .../TransportServiceHandshakeTests.java | 16 +- .../file/FileBasedUnicastHostsProvider.java | 6 +- .../FileBasedUnicastHostsProviderTests.java | 9 +- .../test/discovery/MockZenPing.java | 12 +- .../AbstractSimpleTransportTestCase.java | 8 +- .../transport/MockTcpTransport.java | 19 +- 17 files changed, 720 insertions(+), 459 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java index 350a35b6e49..b8600465b0e 100644 --- a/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java +++ b/core/src/main/java/org/elasticsearch/client/transport/TransportClientNodesService.java @@ -101,6 +101,21 @@ final class TransportClientNodesService extends AbstractComponent implements Clo private final TransportClient.HostFailureListener hostFailureListener; + // TODO: migrate this to use low level connections and single type channels + /** {@link ConnectionProfile} to use when to connecting to the listed nodes and doing a liveness check */ + private static final ConnectionProfile LISTED_NODES_PROFILE; + + static { + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + builder.addConnections(1, + TransportRequestOptions.Type.BULK, + TransportRequestOptions.Type.PING, + TransportRequestOptions.Type.RECOVERY, + TransportRequestOptions.Type.REG, + TransportRequestOptions.Type.STATE); + LISTED_NODES_PROFILE = builder.build(); + } + TransportClientNodesService(Settings settings, TransportService transportService, ThreadPool threadPool, TransportClient.HostFailureListener hostFailureListener) { super(settings); @@ -389,8 +404,8 @@ final class TransportClientNodesService extends AbstractComponent implements Clo if (!transportService.nodeConnected(listedNode)) { try { // its a listed node, light connect to it... - logger.trace("connecting to listed node (light) [{}]", listedNode); - transportService.connectToNode(listedNode, ConnectionProfile.LIGHT_PROFILE); + logger.trace("connecting to listed node [{}]", listedNode); + transportService.connectToNode(listedNode, LISTED_NODES_PROFILE); } catch (Exception e) { logger.info( (Supplier) @@ -470,7 +485,7 @@ final class TransportClientNodesService extends AbstractComponent implements Clo } else { // its a listed node, light connect to it... 
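As a rough illustration of the three-round flow described in highlights 1)-3) above (ping immediately, again at 1/3 and 2/3 of the configured duration, and hand the collected results to the consumer when the full duration elapses), the following standalone sketch uses a plain ScheduledExecutorService instead of Elasticsearch's ThreadPool; the class and variable names are invented for the sketch and are not part of the patch.

import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

// Illustrative only: three ping rounds at 0, 1/3 and 2/3 of the configured
// duration, with the round closed and results delivered at the full duration,
// mirroring the scheduling described in the commit message.
class PingRoundSketch {
    public static void main(String[] args) throws InterruptedException {
        long durationMillis = 3000L; // stand-in for the configured ping duration
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        Runnable sendPings = () -> System.out.println("sending a batch of pings");

        scheduler.execute(sendPings);                                                 // round 1: immediately
        scheduler.schedule(sendPings, durationMillis / 3, TimeUnit.MILLISECONDS);     // round 2: at 1/3 of the duration
        scheduler.schedule(sendPings, durationMillis / 3 * 2, TimeUnit.MILLISECONDS); // round 3: at 2/3 of the duration
        scheduler.schedule(() -> System.out.println("closing round, delivering collected pings"),
                durationMillis, TimeUnit.MILLISECONDS);                               // finish at the full duration

        Thread.sleep(durationMillis + 200L); // let the sketch run to completion
        scheduler.shutdown();
    }
}

The patch itself drives the same schedule through ThreadPool.schedule and closes the round in finishPingingRound, as shown in the UnicastZenPing diff further below.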
logger.trace("connecting to listed node (light) [{}]", listedNode); - transportService.connectToNode(listedNode, ConnectionProfile.LIGHT_PROFILE); + transportService.connectToNode(listedNode, LISTED_NODES_PROFILE); } } catch (Exception e) { logger.debug( diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java index 7116597bdaf..f0de15da9ae 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java @@ -24,7 +24,6 @@ import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractComponent; -import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -174,7 +173,7 @@ public class ElectMasterService extends AbstractComponent { * Returns the given nodes sorted by likelihood of being elected as master, most likely first. * Non-master nodes are not removed but are rather put in the end */ - public static List sortByMasterLikelihood(Iterable nodes) { + static List sortByMasterLikelihood(Iterable nodes) { ArrayList sortedNodes = CollectionUtils.iterableAsArrayList(nodes); CollectionUtil.introSort(sortedNodes, ElectMasterService::compareNodes); return sortedNodes; diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index 4a6006c1a04..8018c47b4e1 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -23,13 +23,12 @@ import com.carrotsearch.hppc.cursors.ObjectCursor; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; -import org.elasticsearch.ElasticsearchException; +import org.apache.lucene.store.AlreadyClosedException; +import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -44,10 +43,14 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; +import org.elasticsearch.common.util.concurrent.KeyedLock; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; +import org.elasticsearch.transport.ConnectionProfile; +import org.elasticsearch.transport.NodeNotConnectedException; import org.elasticsearch.transport.RemoteTransportException; +import 
org.elasticsearch.transport.Transport.Connection; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; @@ -60,8 +63,8 @@ import org.elasticsearch.transport.TransportService; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collection; -import java.util.HashSet; +import java.util.Collections; +import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Locale; @@ -70,18 +73,17 @@ import java.util.Objects; import java.util.Queue; import java.util.Set; import java.util.concurrent.Callable; -import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; -import java.util.concurrent.RejectedExecutionException; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import java.util.function.Function; import java.util.stream.Collectors; +import java.util.stream.Stream; import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; @@ -116,22 +118,19 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { private volatile PingContextProvider contextProvider; - private final AtomicInteger pingHandlerIdGenerator = new AtomicInteger(); + private final AtomicInteger pingingRoundIdGenerator = new AtomicInteger(); - // used to generate unique ids for nodes/address we temporarily connect to - private final AtomicInteger unicastNodeIdGenerator = new AtomicInteger(); - - // used as a node id prefix for nodes/address we temporarily connect to + // used as a node id prefix for configured unicast host nodes/address private static final String UNICAST_NODE_PREFIX = "#zen_unicast_"; - private final Map receivedResponses = newConcurrentMap(); + private final Map activePingingRounds = newConcurrentMap(); // a list of temporal responses a node will return for a request (holds responses from other nodes) private final Queue temporalResponses = ConcurrentCollections.newQueue(); private final UnicastHostsProvider hostsProvider; - private final ExecutorService unicastZenPingExecutorService; + protected final EsThreadPoolExecutor unicastZenPingExecutorService; private final TimeValue resolveTimeout; @@ -146,15 +145,14 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { this.hostsProvider = unicastHostsProvider; this.concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); - final List hosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); - if (hosts.isEmpty()) { + if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { + configuredHosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); + // we only limit to 1 addresses, makes no sense to ping 100 ports + limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; + } else { // if unicast hosts are not specified, fill with simple defaults on the local machine configuredHosts = transportService.getLocalAddresses(); limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; - } else { - configuredHosts = hosts; - // we only limit to 1 addresses, makes no sense to ping 100 ports - limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; } resolveTimeout = 
DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT.get(settings); logger.debug( @@ -164,7 +162,7 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { resolveTimeout); transportService.registerRequestHandler(ACTION_NAME, UnicastPingRequest::new, ThreadPool.Names.SAME, - new UnicastPingRequestHandler()); + new UnicastPingRequestHandler()); final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[unicast_connect]"); unicastZenPingExecutorService = EsExecutors.newScaling( @@ -186,23 +184,23 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { * @param hosts the hosts to resolve * @param limitPortCounts the number of ports to resolve (should be 1 for non-local transport) * @param transportService the transport service - * @param idGenerator the generator to supply unique ids for each discovery node + * @param nodeId_prefix a prefix to use for node ids * @param resolveTimeout the timeout before returning from hostname lookups * @return a list of discovery nodes with resolved transport addresses */ - public static List resolveDiscoveryNodes( + public static List resolveHostsLists( final ExecutorService executorService, final Logger logger, final List hosts, final int limitPortCounts, final TransportService transportService, - final Supplier idGenerator, + final String nodeId_prefix, final TimeValue resolveTimeout) throws InterruptedException { Objects.requireNonNull(executorService); Objects.requireNonNull(logger); Objects.requireNonNull(hosts); Objects.requireNonNull(transportService); - Objects.requireNonNull(idGenerator); + Objects.requireNonNull(nodeId_prefix); Objects.requireNonNull(resolveTimeout); if (resolveTimeout.nanos() < 0) { throw new IllegalArgumentException("resolve timeout must be non-negative but was [" + resolveTimeout + "]"); @@ -211,7 +209,7 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { final List> callables = hosts .stream() - .map(hn -> (Callable)() -> transportService.addressesFromString(hn, limitPortCounts)) + .map(hn -> (Callable) () -> transportService.addressesFromString(hn, limitPortCounts)) .collect(Collectors.toList()); final List> futures = executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); @@ -226,11 +224,11 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { try { final TransportAddress[] addresses = future.get(); logger.trace("resolved host [{}] to {}", hostname, addresses); - for (final TransportAddress address : addresses) { + for (int addressId = 0; addressId < addresses.length; addressId++) { discoveryNodes.add( new DiscoveryNode( - idGenerator.get(), - address, + nodeId_prefix + hostname + "_" + addressId + "#", + addresses[addressId], emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion())); @@ -249,8 +247,8 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { @Override public void close() { - ThreadPool.terminate(unicastZenPingExecutorService, 0, TimeUnit.SECONDS); - Releasables.close(receivedResponses.values()); + ThreadPool.terminate(unicastZenPingExecutorService, 10, TimeUnit.SECONDS); + Releasables.close(activePingingRounds.values()); closed = true; } @@ -266,106 +264,106 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { temporalResponses.clear(); } - // test only - Collection pingAndWait(TimeValue duration) { - final AtomicReference> response = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - 
ping(pings -> { - response.set(pings); - latch.countDown(); - }, duration); - try { - latch.await(); - return response.get(); - } catch (InterruptedException e) { - return null; - } - } - /** - * Sends three rounds of pings notifying the specified {@link PingListener} when pinging is complete. Pings are sent after resolving + * Sends three rounds of pings notifying the specified {@link Consumer} when pinging is complete. Pings are sent after resolving * configured unicast hosts to their IP address (subject to DNS caching within the JVM). A batch of pings is sent, then another batch * of pings is sent at half the specified {@link TimeValue}, and then another batch of pings is sent at the specified {@link TimeValue}. * The pings that are sent carry a timeout of 1.25 times the specified {@link TimeValue}. When pinging each node, a connection and * handshake is performed, with a connection timeout of the specified {@link TimeValue}. * - * @param listener the callback when pinging is complete - * @param duration the timeout for various components of the pings + * @param resultsConsumer the callback when pinging is complete + * @param duration the timeout for various components of the pings */ @Override - public void ping(final PingListener listener, final TimeValue duration) { - final List resolvedDiscoveryNodes; + public void ping(final Consumer resultsConsumer, final TimeValue duration) { + ping(resultsConsumer, duration, duration); + } + + /** + * a variant of {@link #ping(Consumer, TimeValue)}, but allows separating the scheduling duration + * from the duration used for request level time outs. This is useful for testing + */ + protected void ping(final Consumer resultsConsumer, + final TimeValue scheduleDuration, + final TimeValue requestDuration) { + final List seedNodes; try { - resolvedDiscoveryNodes = resolveDiscoveryNodes( + seedNodes = resolveHostsLists( unicastZenPingExecutorService, logger, configuredHosts, limitPortCounts, transportService, - () -> UNICAST_NODE_PREFIX + unicastNodeIdGenerator.incrementAndGet() + "#", + UNICAST_NODE_PREFIX, resolveTimeout); } catch (InterruptedException e) { throw new RuntimeException(e); } - final SendPingsHandler sendPingsHandler = new SendPingsHandler(pingHandlerIdGenerator.incrementAndGet()); - try { - receivedResponses.put(sendPingsHandler.id(), sendPingsHandler); - try { - sendPings(duration, null, sendPingsHandler, resolvedDiscoveryNodes); - } catch (RejectedExecutionException e) { - logger.debug("Ping execution rejected", e); - // The RejectedExecutionException can come from the fact unicastZenPingExecutorService is at its max down in sendPings - // But don't bail here, we can retry later on after the send ping has been scheduled. 
+ seedNodes.addAll(hostsProvider.buildDynamicNodes()); + final DiscoveryNodes nodes = contextProvider.nodes(); + // add all possible master nodes that were active in the last known cluster configuration + for (ObjectCursor masterNode : nodes.getMasterNodes().values()) { + seedNodes.add(masterNode.value); + } + + final ConnectionProfile connectionProfile = + ConnectionProfile.buildSingleChannelProfile(TransportRequestOptions.Type.REG, requestDuration, requestDuration); + final PingingRound pingingRound = new PingingRound(pingingRoundIdGenerator.incrementAndGet(), seedNodes, resultsConsumer, + nodes.getLocalNode(), connectionProfile); + activePingingRounds.put(pingingRound.id(), pingingRound); + final AbstractRunnable pingSender = new AbstractRunnable() { + @Override + public void onFailure(Exception e) { + if (e instanceof AlreadyClosedException == false) { + logger.warn("unexpected error while pinging", e); + } } - threadPool.schedule(TimeValue.timeValueMillis(duration.millis() / 2), ThreadPool.Names.GENERIC, new AbstractRunnable() { - @Override - protected void doRun() { - sendPings(duration, null, sendPingsHandler, resolvedDiscoveryNodes); - threadPool.schedule(TimeValue.timeValueMillis(duration.millis() / 2), ThreadPool.Names.GENERIC, new AbstractRunnable() { - @Override - protected void doRun() throws Exception { - sendPings(duration, TimeValue.timeValueMillis(duration.millis() / 2), sendPingsHandler, resolvedDiscoveryNodes); - sendPingsHandler.close(); - listener.onPing(sendPingsHandler.pingCollection().toList()); - for (DiscoveryNode node : sendPingsHandler.nodeToDisconnect) { - logger.trace("[{}] disconnecting from {}", sendPingsHandler.id(), node); - transportService.disconnectFromNode(node); - } - } + @Override + protected void doRun() throws Exception { + sendPings(requestDuration, pingingRound); + } + }; + threadPool.generic().execute(pingSender); + threadPool.schedule(TimeValue.timeValueMillis(scheduleDuration.millis() / 3), ThreadPool.Names.GENERIC, pingSender); + threadPool.schedule(TimeValue.timeValueMillis(scheduleDuration.millis() / 3 * 2), ThreadPool.Names.GENERIC, pingSender); + threadPool.schedule(scheduleDuration, ThreadPool.Names.GENERIC, new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + finishPingingRound(pingingRound); + } - @Override - public void onFailure(Exception e) { - logger.debug("Ping execution failed", e); - sendPingsHandler.close(); - } - }); - } - - @Override - public void onFailure(Exception e) { - logger.debug("Ping execution failed", e); - sendPingsHandler.close(); - } - }); - } catch (EsRejectedExecutionException ex) { // TODO: remove this once ScheduledExecutor has support for AbstractRunnable - sendPingsHandler.close(); - // we are shutting down - } catch (Exception e) { - sendPingsHandler.close(); - throw new ElasticsearchException("Ping execution failed", e); - } + @Override + public void onFailure(Exception e) { + logger.warn("unexpected error while finishing pinging round", e); + } + }); } - class SendPingsHandler implements Releasable { + // for testing + protected void finishPingingRound(PingingRound pingingRound) { + pingingRound.close(); + } + + protected class PingingRound implements Releasable { private final int id; - private final Set nodeToDisconnect = ConcurrentCollections.newConcurrentSet(); + private final Map tempConnections = new HashMap<>(); + private final KeyedLock connectionLock = new KeyedLock<>(true); private final PingCollection pingCollection; + private final List seedNodes; + private final 
Consumer pingListener; + private final DiscoveryNode localNode; + private final ConnectionProfile connectionProfile; private AtomicBoolean closed = new AtomicBoolean(false); - SendPingsHandler(int id) { + PingingRound(int id, List seedNodes, Consumer resultsConsumer, DiscoveryNode localNode, + ConnectionProfile connectionProfile) { this.id = id; + this.seedNodes = Collections.unmodifiableList(new ArrayList<>(seedNodes)); + this.pingListener = resultsConsumer; + this.localNode = localNode; + this.connectionProfile = connectionProfile; this.pingCollection = new PingCollection(); } @@ -377,154 +375,170 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { return this.closed.get(); } - public PingCollection pingCollection() { - return pingCollection; + public List getSeedNodes() { + ensureOpen(); + return seedNodes; + } + + public Connection getOrConnect(DiscoveryNode node) throws IOException { + Connection result; + try (Releasable ignore = connectionLock.acquire(node.getAddress())) { + result = tempConnections.get(node.getAddress()); + if (result == null) { + boolean success = false; + result = transportService.openConnection(node, connectionProfile); + try { + transportService.handshake(result, connectionProfile.getHandshakeTimeout().millis()); + synchronized (this) { + // acquire lock to prevent concurrent closing + Connection existing = tempConnections.put(node.getAddress(), result); + assert existing == null; + success = true; + } + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(result); + } + } + } + } + return result; + } + + private void ensureOpen() { + if (isClosed()) { + throw new AlreadyClosedException("pinging round [" + id + "] is finished"); + } + } + + public void addPingResponseToCollection(PingResponse pingResponse) { + if (localNode.equals(pingResponse.node()) == false) { + pingCollection.addPing(pingResponse); + } } @Override public void close() { - if (closed.compareAndSet(false, true)) { - receivedResponses.remove(id); + List toClose = null; + synchronized (this) { + if (closed.compareAndSet(false, true)) { + activePingingRounds.remove(id); + toClose = new ArrayList<>(tempConnections.values()); + tempConnections.clear(); + } } + if (toClose != null) { + // we actually closed + try { + pingListener.accept(pingCollection); + } finally { + IOUtils.closeWhileHandlingException(toClose); + } + } + } + + public ConnectionProfile getConnectionProfile() { + return connectionProfile; } } - void sendPings( - final TimeValue timeout, - @Nullable TimeValue waitTime, - final SendPingsHandler sendPingsHandler, - final List resolvedDiscoveryNodes) { + protected void sendPings(final TimeValue timeout, final PingingRound pingingRound) { final UnicastPingRequest pingRequest = new UnicastPingRequest(); - pingRequest.id = sendPingsHandler.id(); + pingRequest.id = pingingRound.id(); pingRequest.timeout = timeout; DiscoveryNodes discoNodes = contextProvider.nodes(); pingRequest.pingResponse = createPingResponse(discoNodes); - HashSet nodesToPingSet = new HashSet<>(); - for (PingResponse temporalResponse : temporalResponses) { - // Only send pings to nodes that have the same cluster name. - if (clusterName.equals(temporalResponse.clusterName())) { - nodesToPingSet.add(temporalResponse.node()); - } - } - nodesToPingSet.addAll(hostsProvider.buildDynamicNodes()); + Set nodesFromResponses = temporalResponses.stream().map(pingResponse -> { + assert clusterName.equals(pingResponse.clusterName()) : + "got a ping request from a different cluster. 
expected " + clusterName + " got " + pingResponse.clusterName(); + return pingResponse.node(); + }).collect(Collectors.toSet()); - // add all possible master nodes that were active in the last known cluster configuration - for (ObjectCursor masterNode : discoNodes.getMasterNodes().values()) { - nodesToPingSet.add(masterNode.value); - } + // dedup by address + final Map uniqueNodesByAddress = + Stream.concat(pingingRound.getSeedNodes().stream(), nodesFromResponses.stream()) + .collect(Collectors.toMap(DiscoveryNode::getAddress, Function.identity(), (n1, n2) -> n1)); - // sort the nodes by likelihood of being an active master - List sortedNodesToPing = ElectMasterService.sortByMasterLikelihood(nodesToPingSet); - // add the configured hosts first - final List nodesToPing = new ArrayList<>(resolvedDiscoveryNodes.size() + sortedNodesToPing.size()); - nodesToPing.addAll(resolvedDiscoveryNodes); - nodesToPing.addAll(sortedNodesToPing); - - final CountDownLatch latch = new CountDownLatch(nodesToPing.size()); - for (final DiscoveryNode node : nodesToPing) { - // make sure we are connected - final boolean nodeFoundByAddress; - DiscoveryNode nodeToSend = discoNodes.findByAddress(node.getAddress()); - if (nodeToSend != null) { - nodeFoundByAddress = true; - } else { - nodeToSend = node; - nodeFoundByAddress = false; - } - - if (!transportService.nodeConnected(nodeToSend)) { - if (sendPingsHandler.isClosed()) { - return; + // resolve what we can via the latest cluster state + final Set nodesToPing = uniqueNodesByAddress.values().stream() + .map(node -> { + DiscoveryNode foundNode = discoNodes.findByAddress(node.getAddress()); + if (foundNode == null) { + return node; + } else { + return foundNode; } - // if we find on the disco nodes a matching node by address, we are going to restore the connection - // anyhow down the line if its not connected... - // if we can't resolve the node, we don't know and we have to clean up after pinging. We do have - // to make sure we don't disconnect a true node which was temporarily removed from the DiscoveryNodes - // but will be added again during the pinging. 
We therefore create a new temporary node - if (!nodeFoundByAddress) { - if (!nodeToSend.getId().startsWith(UNICAST_NODE_PREFIX)) { - DiscoveryNode tempNode = new DiscoveryNode("", - UNICAST_NODE_PREFIX + unicastNodeIdGenerator.incrementAndGet() + "_" + nodeToSend.getId() + "#", - UUIDs.randomBase64UUID(), nodeToSend.getHostName(), nodeToSend.getHostAddress(), nodeToSend.getAddress(), - nodeToSend.getAttributes(), nodeToSend.getRoles(), nodeToSend.getVersion()); + }).collect(Collectors.toSet()); - logger.trace("replacing {} with temp node {}", nodeToSend, tempNode); - nodeToSend = tempNode; - } - sendPingsHandler.nodeToDisconnect.add(nodeToSend); - } - // fork the connection to another thread - final DiscoveryNode finalNodeToSend = nodeToSend; - unicastZenPingExecutorService.execute(new Runnable() { - @Override - public void run() { - if (sendPingsHandler.isClosed()) { - return; - } - boolean success = false; - try { - // connect to the node, see if we manage to do it, if not, bail - if (!nodeFoundByAddress) { - logger.trace("[{}] connecting (light) to {}", sendPingsHandler.id(), finalNodeToSend); - transportService.connectToNodeAndHandshake(finalNodeToSend, timeout.getMillis()); - } else { - logger.trace("[{}] connecting to {}", sendPingsHandler.id(), finalNodeToSend); - transportService.connectToNode(finalNodeToSend); - } - logger.trace("[{}] connected to {}", sendPingsHandler.id(), node); - if (receivedResponses.containsKey(sendPingsHandler.id())) { - // we are connected and still in progress, send the ping request - sendPingRequestToNode(sendPingsHandler.id(), timeout, pingRequest, latch, node, finalNodeToSend); - } else { - // connect took too long, just log it and bail - latch.countDown(); - logger.trace("[{}] connect to {} was too long outside of ping window, bailing", - sendPingsHandler.id(), node); - } - success = true; - } catch (ConnectTransportException e) { - // can't connect to the node - this is a more common path! 
- logger.trace( - (Supplier) () -> new ParameterizedMessage( - "[{}] failed to connect to {}", sendPingsHandler.id(), finalNodeToSend), e); - } catch (RemoteTransportException e) { - // something went wrong on the other side - logger.debug( - (Supplier) () -> new ParameterizedMessage( - "[{}] received a remote error as a response to ping {}", sendPingsHandler.id(), finalNodeToSend), e); - } catch (Exception e) { - logger.warn( - (Supplier) () -> new ParameterizedMessage( - "[{}] failed send ping to {}", sendPingsHandler.id(), finalNodeToSend), e); - } finally { - if (!success) { - latch.countDown(); - } - } - } - }); - } else { - sendPingRequestToNode(sendPingsHandler.id(), timeout, pingRequest, latch, node, nodeToSend); - } - } - if (waitTime != null) { - try { - latch.await(waitTime.millis(), TimeUnit.MILLISECONDS); - } catch (InterruptedException e) { - // ignore - } - } + nodesToPing.forEach(node -> sendPingRequestToNode(node, timeout, pingingRound, pingRequest)); } - private void sendPingRequestToNode(final int id, final TimeValue timeout, final UnicastPingRequest pingRequest, - final CountDownLatch latch, final DiscoveryNode node, final DiscoveryNode nodeToSend) { - logger.trace("[{}] sending to {}", id, nodeToSend); - transportService.sendRequest(nodeToSend, ACTION_NAME, pingRequest, TransportRequestOptions.builder() - .withTimeout((long) (timeout.millis() * 1.25)).build(), new TransportResponseHandler() { + private void sendPingRequestToNode(final DiscoveryNode node, TimeValue timeout, final PingingRound pingingRound, + final UnicastPingRequest pingRequest) { + submitToExecutor(new AbstractRunnable() { + @Override + protected void doRun() throws Exception { + Connection connection = null; + if (transportService.nodeConnected(node)) { + try { + // concurrency can still cause disconnects + connection = transportService.getConnection(node); + } catch (NodeNotConnectedException e) { + logger.trace("[{}] node [{}] just disconnected, will create a temp connection", pingingRound.id(), node); + } + } + + if (connection == null) { + connection = pingingRound.getOrConnect(node); + } + + logger.trace("[{}] sending to {}", pingingRound.id(), node); + transportService.sendRequest(connection, ACTION_NAME, pingRequest, + TransportRequestOptions.builder().withTimeout((long) (timeout.millis() * 1.25)).build(), + getPingResponseHandler(pingingRound, node)); + } + + @Override + public void onFailure(Exception e) { + if (e instanceof ConnectTransportException || e instanceof AlreadyClosedException) { + // can't connect to the node - this is more common path! + logger.trace( + (Supplier) () -> new ParameterizedMessage( + "[{}] failed to ping {}", pingingRound.id(), node), e); + } else if (e instanceof RemoteTransportException) { + // something went wrong on the other side + logger.debug( + (Supplier) () -> new ParameterizedMessage( + "[{}] received a remote error as a response to ping {}", pingingRound.id(), node), e); + } else { + logger.warn( + (Supplier) () -> new ParameterizedMessage( + "[{}] failed send ping to {}", pingingRound.id(), node), e); + } + } + + @Override + public void onRejection(Exception e) { + // The RejectedExecutionException can come from the fact unicastZenPingExecutorService is at its max down in sendPings + // But don't bail here, we can retry later on after the send ping has been scheduled. 
+ logger.debug("Ping execution rejected", e); + } + }); + } + + // for testing + protected void submitToExecutor(AbstractRunnable abstractRunnable) { + unicastZenPingExecutorService.execute(abstractRunnable); + } + + // for testing + protected TransportResponseHandler getPingResponseHandler(final PingingRound pingingRound, + final DiscoveryNode node) { + return new TransportResponseHandler() { @Override public UnicastPingResponse newInstance() { @@ -538,50 +552,36 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { @Override public void handleResponse(UnicastPingResponse response) { - logger.trace("[{}] received response from {}: {}", id, nodeToSend, Arrays.toString(response.pingResponses)); - try { - DiscoveryNodes discoveryNodes = contextProvider.nodes(); - for (PingResponse pingResponse : response.pingResponses) { - if (pingResponse.node().equals(discoveryNodes.getLocalNode())) { - // that's us, ignore - continue; - } - SendPingsHandler sendPingsHandler = receivedResponses.get(response.id); - if (sendPingsHandler == null) { - if (!closed) { - // Only log when we're not closing the node. Having no send ping handler is then expected - logger.warn("received ping response {} with no matching handler id [{}]", pingResponse, response.id); - } - } else { - sendPingsHandler.pingCollection().addPing(pingResponse); - } + logger.trace("[{}] received response from {}: {}", pingingRound.id(), node, Arrays.toString(response.pingResponses)); + if (pingingRound.isClosed()) { + if (logger.isTraceEnabled()) { + logger.trace("[{}] skipping received response from {}. already closed", pingingRound.id(), node); } - } finally { - latch.countDown(); + } else { + Stream.of(response.pingResponses).forEach(pingingRound::addPingResponseToCollection); } } @Override public void handleException(TransportException exp) { - latch.countDown(); - if (exp instanceof ConnectTransportException) { + if (exp instanceof ConnectTransportException || exp.getCause() instanceof ConnectTransportException) { // ok, not connected... - logger.trace((Supplier) () -> new ParameterizedMessage("failed to connect to {}", nodeToSend), exp); - } else { + logger.trace((Supplier) () -> new ParameterizedMessage("failed to connect to {}", node), exp); + } else if (closed == false) { logger.warn((Supplier) () -> new ParameterizedMessage("failed to send ping to [{}]", node), exp); } } - }); + }; } private UnicastPingResponse handlePingRequest(final UnicastPingRequest request) { + assert clusterName.equals(request.pingResponse.clusterName()) : + "got a ping request from a different cluster. 
expected " + clusterName + " got " + request.pingResponse.clusterName(); temporalResponses.add(request.pingResponse); - threadPool.schedule(TimeValue.timeValueMillis(request.timeout.millis() * 2), ThreadPool.Names.SAME, new Runnable() { - @Override - public void run() { - temporalResponses.remove(request.pingResponse); - } - }); + // add to any ongoing pinging + activePingingRounds.values().forEach(p -> p.addPingResponseToCollection(request.pingResponse)); + threadPool.schedule(TimeValue.timeValueMillis(request.timeout.millis() * 2), ThreadPool.Names.SAME, + () -> temporalResponses.remove(request.pingResponse)); List pingResponses = CollectionUtils.iterableAsArrayList(temporalResponses); pingResponses.add(createPingResponse(contextProvider.nodes())); @@ -601,11 +601,11 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { channel.sendResponse(handlePingRequest(request)); } else { throw new IllegalStateException( - String.format( - Locale.ROOT, - "mismatched cluster names; request: [%s], local: [%s]", - request.pingResponse.clusterName().value(), - clusterName.value())); + String.format( + Locale.ROOT, + "mismatched cluster names; request: [%s], local: [%s]", + request.pingResponse.clusterName().value(), + clusterName.value())); } } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 16ca16d4a90..b0adf1696ee 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -67,11 +67,11 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Set; -import java.util.concurrent.CountDownLatch; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiFunction; import java.util.function.Consumer; import java.util.stream.Collectors; @@ -1021,24 +1021,22 @@ public class ZenDiscovery extends AbstractLifecycleComponent implements Discover } private ZenPing.PingCollection pingAndWait(TimeValue timeout) { - final ZenPing.PingCollection response = new ZenPing.PingCollection(); - final CountDownLatch latch = new CountDownLatch(1); + final CompletableFuture response = new CompletableFuture<>(); try { - zenPing.ping(pings -> { - response.addPings(pings); - latch.countDown(); - }, timeout); + zenPing.ping(response::complete, timeout); } catch (Exception ex) { - logger.warn("Ping execution failed", ex); - latch.countDown(); + // logged later + response.completeExceptionally(ex); } try { - latch.await(); - return response; + return response.get(); } catch (InterruptedException e) { logger.trace("pingAndWait interrupted"); - return response; + return new ZenPing.PingCollection(); + } catch (ExecutionException e) { + logger.warn("Ping execution failed", e); + return new ZenPing.PingCollection(); } } diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java b/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java index 75ea701dc99..622c4649db2 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ZenPing.java @@ -30,11 +30,11 @@ import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; import 
java.util.ArrayList; -import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Consumer; import static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; @@ -42,17 +42,7 @@ public interface ZenPing extends Releasable { void start(PingContextProvider contextProvider); - void ping(PingListener listener, TimeValue timeout); - - interface PingListener { - - /** - * called when pinging is done. - * - * @param pings ping result *must - */ - void onPing(Collection pings); - } + void ping(Consumer resultsConsumer, TimeValue timeout); class PingResponse implements Streamable { @@ -191,13 +181,6 @@ public interface ZenPing extends Releasable { return false; } - /** adds multiple pings if newer than previous pings from the same node */ - public synchronized void addPings(Iterable pings) { - for (PingResponse ping : pings) { - addPing(ping); - } - } - /** serialize current pings to a list. It is guaranteed that the list contains one ping response per node */ public synchronized List toList() { return new ArrayList<>(pings.values()); diff --git a/core/src/main/java/org/elasticsearch/transport/ConnectionProfile.java b/core/src/main/java/org/elasticsearch/transport/ConnectionProfile.java index 92421adea6a..8c14c6b5c9d 100644 --- a/core/src/main/java/org/elasticsearch/transport/ConnectionProfile.java +++ b/core/src/main/java/org/elasticsearch/transport/ConnectionProfile.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.transport; +import org.elasticsearch.common.inject.internal.Nullable; import org.elasticsearch.common.unit.TimeValue; import java.util.ArrayList; @@ -35,16 +36,25 @@ import java.util.concurrent.atomic.AtomicInteger; public final class ConnectionProfile { /** - * A pre-built light connection profile that shares a single connection across all - * types. + * Builds a connection profile that is dedicated to a single channel type. 
Use this + * when opening single use connections */ - public static final ConnectionProfile LIGHT_PROFILE = new ConnectionProfile( - Collections.singletonList(new ConnectionTypeHandle(0, 1, EnumSet.of( - TransportRequestOptions.Type.BULK, - TransportRequestOptions.Type.PING, - TransportRequestOptions.Type.RECOVERY, - TransportRequestOptions.Type.REG, - TransportRequestOptions.Type.STATE))), 1, null, null); + public static ConnectionProfile buildSingleChannelProfile(TransportRequestOptions.Type channelType, + @Nullable TimeValue connectTimeout, + @Nullable TimeValue handshakeTimeout) { + Builder builder = new Builder(); + builder.addConnections(1, channelType); + final EnumSet otherTypes = EnumSet.allOf(TransportRequestOptions.Type.class); + otherTypes.remove(channelType); + builder.addConnections(0, otherTypes.stream().toArray(TransportRequestOptions.Type[]::new)); + if (connectTimeout != null) { + builder.setConnectTimeout(connectTimeout); + } + if (handshakeTimeout != null) { + builder.setHandshakeTimeout(handshakeTimeout); + } + return builder.build(); + } private final List handles; private final int numConnections; diff --git a/core/src/main/java/org/elasticsearch/transport/Transport.java b/core/src/main/java/org/elasticsearch/transport/Transport.java index e337aaf41b2..44c72e1f548 100644 --- a/core/src/main/java/org/elasticsearch/transport/Transport.java +++ b/core/src/main/java/org/elasticsearch/transport/Transport.java @@ -63,8 +63,7 @@ public interface Transport extends LifecycleComponent { boolean nodeConnected(DiscoveryNode node); /** - * Connects to a node with the given connection profile. Use {@link ConnectionProfile#LIGHT_PROFILE} when just connecting for ping - * and then disconnecting. If the node is already connected this method has no effect + * Connects to a node with the given connection profile. If the node is already connected this method has no effect */ void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) throws ConnectTransportException; diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java index 8884177ba63..e76210ff195 100644 --- a/core/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java @@ -62,7 +62,6 @@ import java.util.Objects; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.atomic.AtomicLong; import java.util.function.Function; import java.util.function.Supplier; @@ -328,32 +327,6 @@ public class TransportService extends AbstractLifecycleComponent { } } - /** - * Lightly connect to the specified node, returning updated node - * information. The handshake will fail if the cluster name on the - * target node mismatches the local cluster name and - * {@code checkClusterName} is {@code true}. 
- * - * @param node the node to connect to - * @param handshakeTimeout handshake timeout - * @return the connected node - * @throws ConnectTransportException if the connection failed - * @throws IllegalStateException if the handshake failed - */ - public DiscoveryNode connectToNodeAndHandshake( - final DiscoveryNode node, - final long handshakeTimeout) throws IOException { - if (node.equals(localNode)) { - return localNode; - } - DiscoveryNode handshakeNode; - try (Transport.Connection connection = transport.openConnection(node, ConnectionProfile.LIGHT_PROFILE)) { - handshakeNode = handshake(connection, handshakeTimeout); - } - connectToNode(node, ConnectionProfile.LIGHT_PROFILE); - return handshakeNode; - } - /** * Executes a high-level handshake using the given connection * and returns the discovery node of the node the connection diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index de8d1a562e8..84650f042cf 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNode.Role; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.network.NetworkAddress; @@ -34,18 +35,22 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.ConnectionProfile; import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportConnectionListener; +import org.elasticsearch.transport.TransportException; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportResponseHandler; import org.elasticsearch.transport.TransportService; import org.elasticsearch.transport.TransportSettings; import org.junit.After; @@ -60,12 +65,14 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Stack; +import java.util.concurrent.CompletableFuture; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.ExecutionException; @@ -82,7 +89,6 @@ import static java.util.Collections.emptySet; import 
static org.elasticsearch.gateway.GatewayService.STATE_NOT_RECOVERED_BLOCK; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; @@ -124,8 +130,7 @@ public class UnicastZenPingTests extends ESTestCase { private static final UnicastHostsProvider EMPTY_HOSTS_PROVIDER = Collections::emptyList; - @TestLogging("org.elasticsearch.transport:TRACE,org.elasticsearch.discovery.zen.UnicastZenPing:TRACE") - public void testSimplePings() throws IOException, InterruptedException { + public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); final Settings settingsMismatch = @@ -140,7 +145,12 @@ public class UnicastZenPingTests extends ESTestCase { new NoneCircuitBreakerService(), new NamedWriteableRegistry(Collections.emptyList()), networkService, - v); + v) { + @Override + public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfile) { + throw new AssertionError("zen pings should never connect to node (got [" + node + "])"); + } + }; NetworkHandle handleA = startServices(settings, threadPool, "UZP_A", Version.CURRENT, supplier); closeables.push(handleA.transportService); @@ -148,25 +158,30 @@ public class UnicastZenPingTests extends ESTestCase { closeables.push(handleB.transportService); NetworkHandle handleC = startServices(settingsMismatch, threadPool, "UZP_C", Version.CURRENT, supplier); closeables.push(handleC.transportService); - // just fake that no versions are compatible with this node - Version previousVersion = VersionUtils.getPreviousVersion(Version.CURRENT.minimumCompatibilityVersion()); - Version versionD = VersionUtils.randomVersionBetween(random(), previousVersion.minimumCompatibilityVersion(), previousVersion); + final Version versionD; + if (randomBoolean()) { + versionD = VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT); + } else { + versionD = Version.CURRENT; + } + logger.info("UZP_D version set to [{}]", versionD); NetworkHandle handleD = startServices(settingsMismatch, threadPool, "UZP_D", versionD, supplier); closeables.push(handleD.transportService); final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build(); + final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomPositiveLong()).build(); Settings hostsSettings = Settings.builder() - .putArray("discovery.zen.ping.unicast.hosts", + .putArray("discovery.zen.ping.unicast.hosts", NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleD.address.address().getAddress(), handleD.address.address().getPort()))) - .put("cluster.name", "test") - .build(); + .put("cluster.name", "test") + .build(); Settings hostsSettingsMismatch = Settings.builder().put(hostsSettings).put(settingsMismatch).build(); - UnicastZenPing zenPingA = new 
UnicastZenPing(hostsSettings, threadPool, handleA.transportService, EMPTY_HOSTS_PROVIDER); + TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER); zenPingA.start(new PingContextProvider() { @Override public DiscoveryNodes nodes() { @@ -180,7 +195,7 @@ public class UnicastZenPingTests extends ESTestCase { }); closeables.push(zenPingA); - UnicastZenPing zenPingB = new UnicastZenPing(hostsSettings, threadPool, handleB.transportService, EMPTY_HOSTS_PROVIDER); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER); zenPingB.start(new PingContextProvider() { @Override public DiscoveryNodes nodes() { @@ -194,7 +209,8 @@ public class UnicastZenPingTests extends ESTestCase { }); closeables.push(zenPingB); - UnicastZenPing zenPingC = new UnicastZenPing(hostsSettingsMismatch, threadPool, handleC.transportService, EMPTY_HOSTS_PROVIDER) { + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC, + EMPTY_HOSTS_PROVIDER) { @Override protected Version getVersion() { return versionD; @@ -208,12 +224,13 @@ public class UnicastZenPingTests extends ESTestCase { @Override public ClusterState clusterState() { - return state; + return stateMismatch; } }); closeables.push(zenPingC); - UnicastZenPing zenPingD = new UnicastZenPing(hostsSettingsMismatch, threadPool, handleD.transportService, EMPTY_HOSTS_PROVIDER); + TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD, + EMPTY_HOSTS_PROVIDER); zenPingD.start(new PingContextProvider() { @Override public DiscoveryNodes nodes() { @@ -222,40 +239,48 @@ public class UnicastZenPingTests extends ESTestCase { @Override public ClusterState clusterState() { - return state; + return stateMismatch; } }); closeables.push(zenPingD); logger.info("ping from UZP_A"); - Collection pingResponses = zenPingA.pingAndWait(TimeValue.timeValueMillis(500)); + Collection pingResponses = zenPingA.pingAndWait().toList(); assertThat(pingResponses.size(), equalTo(1)); ZenPing.PingResponse ping = pingResponses.iterator().next(); assertThat(ping.node().getId(), equalTo("UZP_B")); assertThat(ping.getClusterStateVersion(), equalTo(state.version())); - assertCountersMoreThan(handleA, handleB, handleC, handleD); + assertPingCount(handleA, handleB, 3); + assertPingCount(handleA, handleC, 0); // mismatch, shouldn't ping + assertPingCount(handleA, handleD, 0); // mismatch, shouldn't ping // ping again, this time from B, logger.info("ping from UZP_B"); - pingResponses = zenPingB.pingAndWait(TimeValue.timeValueMillis(500)); + pingResponses = zenPingB.pingAndWait().toList(); assertThat(pingResponses.size(), equalTo(1)); ping = pingResponses.iterator().next(); assertThat(ping.node().getId(), equalTo("UZP_A")); assertThat(ping.getClusterStateVersion(), equalTo(ElectMasterService.MasterCandidate.UNRECOVERED_CLUSTER_VERSION)); - assertCountersMoreThan(handleB, handleA, handleC, handleD); + assertPingCount(handleB, handleA, 3); + assertPingCount(handleB, handleC, 0); // mismatch, shouldn't ping + assertPingCount(handleB, handleD, 0); // mismatch, shouldn't ping logger.info("ping from UZP_C"); - pingResponses = zenPingC.pingAndWait(TimeValue.timeValueMillis(500)); - assertThat(pingResponses.size(), equalTo(0)); - assertCountersMoreThan(handleC, handleA, handleB, handleD); + pingResponses = zenPingC.pingAndWait().toList(); + assertThat(pingResponses.size(), equalTo(1)); + assertPingCount(handleC, handleA, 0); + 
assertPingCount(handleC, handleB, 0); + assertPingCount(handleC, handleD, 3); logger.info("ping from UZP_D"); - pingResponses = zenPingD.pingAndWait(TimeValue.timeValueMillis(500)); - assertThat(pingResponses.size(), equalTo(0)); - assertCountersMoreThan(handleD, handleA, handleB, handleC); + pingResponses = zenPingD.pingAndWait().toList(); + assertThat(pingResponses.size(), equalTo(1)); + assertPingCount(handleD, handleA, 0); + assertPingCount(handleD, handleB, 0); + assertPingCount(handleD, handleC, 3); } - public void testUnknownHostNotCached() { + public void testUnknownHostNotCached() throws ExecutionException, InterruptedException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); @@ -306,7 +331,7 @@ public class UnicastZenPingTests extends ESTestCase { final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build(); - final UnicastZenPing zenPingA = new UnicastZenPing(hostsSettings, threadPool, handleA.transportService, EMPTY_HOSTS_PROVIDER); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER); zenPingA.start(new PingContextProvider() { @Override public DiscoveryNodes nodes() { @@ -320,7 +345,7 @@ public class UnicastZenPingTests extends ESTestCase { }); closeables.push(zenPingA); - UnicastZenPing zenPingB = new UnicastZenPing(hostsSettings, threadPool, handleB.transportService, EMPTY_HOSTS_PROVIDER); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER); zenPingB.start(new PingContextProvider() { @Override public DiscoveryNodes nodes() { @@ -334,7 +359,7 @@ public class UnicastZenPingTests extends ESTestCase { }); closeables.push(zenPingB); - UnicastZenPing zenPingC = new UnicastZenPing(hostsSettings, threadPool, handleC.transportService, EMPTY_HOSTS_PROVIDER); + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, EMPTY_HOSTS_PROVIDER); zenPingC.start(new PingContextProvider() { @Override public DiscoveryNodes nodes() { @@ -350,12 +375,13 @@ public class UnicastZenPingTests extends ESTestCase { // the presence of an unresolvable host should not prevent resolvable hosts from being pinged { - final Collection pingResponses = zenPingA.pingAndWait(TimeValue.timeValueMillis(500)); + final Collection pingResponses = zenPingA.pingAndWait().toList(); assertThat(pingResponses.size(), equalTo(1)); ZenPing.PingResponse ping = pingResponses.iterator().next(); assertThat(ping.node().getId(), equalTo("UZP_C")); assertThat(ping.getClusterStateVersion(), equalTo(state.version())); - assertCountersMoreThan(handleA, handleC); + assertPingCount(handleA, handleB, 0); + assertPingCount(handleA, handleC, 3); assertNull(handleA.counters.get(handleB.address)); } @@ -373,11 +399,13 @@ public class UnicastZenPingTests extends ESTestCase { // now we should see pings to UZP_B; this establishes that host resolutions are not cached { - final Collection secondPingResponses = zenPingA.pingAndWait(TimeValue.timeValueMillis(500)); + handleA.counters.clear(); + final Collection secondPingResponses = zenPingA.pingAndWait().toList(); assertThat(secondPingResponses.size(), equalTo(2)); final Set ids = new HashSet<>(secondPingResponses.stream().map(p -> p.node().getId()).collect(Collectors.toList())); assertThat(ids, equalTo(new HashSet<>(Arrays.asList("UZP_B", "UZP_C")))); - assertCountersMoreThan(moreThan, handleA, 
handleB, handleC); + assertPingCount(handleA, handleB, 3); + assertPingCount(handleA, handleC, 3); } } @@ -395,15 +423,14 @@ public class UnicastZenPingTests extends ESTestCase { final TransportService transportService = new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); closeables.push(transportService); - final AtomicInteger idGenerator = new AtomicInteger(); final int limitPortCounts = randomIntBetween(1, 10); - final List discoveryNodes = UnicastZenPing.resolveDiscoveryNodes( + final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList("127.0.0.1"), limitPortCounts, transportService, - () -> Integer.toString(idGenerator.incrementAndGet()), + "test_", TimeValue.timeValueSeconds(1)); assertThat(discoveryNodes, hasSize(limitPortCounts)); final Set ports = new HashSet<>(); @@ -439,15 +466,14 @@ public class UnicastZenPingTests extends ESTestCase { final TransportService transportService = new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); closeables.push(transportService); - final AtomicInteger idGenerator = new AtomicInteger(); - final List discoveryNodes = UnicastZenPing.resolveDiscoveryNodes( + final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList(hostname), 1, transportService, - () -> Integer.toString(idGenerator.incrementAndGet()), + "test_", TimeValue.timeValueSeconds(1) ); @@ -490,16 +516,15 @@ public class UnicastZenPingTests extends ESTestCase { final TransportService transportService = new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); closeables.push(transportService); - final AtomicInteger idGenerator = new AtomicInteger(); final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 3)); try { - final List discoveryNodes = UnicastZenPing.resolveDiscoveryNodes( + final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("hostname1", "hostname2"), 1, transportService, - () -> Integer.toString(idGenerator.incrementAndGet()), + "test+", resolveTimeout); assertThat(discoveryNodes, hasSize(1)); @@ -513,6 +538,156 @@ public class UnicastZenPingTests extends ESTestCase { } } + public void testResolveReuseExistingNodeConnections() throws ExecutionException, InterruptedException { + final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); + + NetworkService networkService = new NetworkService(settings, Collections.emptyList()); + + final BiFunction supplier = (s, v) -> new MockTcpTransport( + s, + threadPool, + BigArrays.NON_RECYCLING_INSTANCE, + new NoneCircuitBreakerService(), + new NamedWriteableRegistry(Collections.emptyList()), + networkService, + v); + + NetworkHandle handleA = startServices(settings, threadPool, "UZP_A", Version.CURRENT, supplier, EnumSet.allOf(Role.class)); + closeables.push(handleA.transportService); + NetworkHandle handleB = startServices(settings, threadPool, "UZP_B", Version.CURRENT, supplier, EnumSet.allOf(Role.class)); + closeables.push(handleB.transportService); + + final boolean useHosts = randomBoolean(); + final Settings.Builder hostsSettingsBuilder = Settings.builder().put("cluster.name", "test"); + if (useHosts) { + hostsSettingsBuilder.putArray("discovery.zen.ping.unicast.hosts", + NetworkAddress.format(new 
InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())) + ); + } else { + hostsSettingsBuilder.put("discovery.zen.ping.unicast.hosts", (String) null); + } + final Settings hostsSettings = hostsSettingsBuilder.build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build(); + + // connection to reuse + handleA.transportService.connectToNode(handleB.node); + + // install a listener to check that no new connections are made + handleA.transportService.addConnectionListener(new TransportConnectionListener() { + @Override + public void onConnectionOpened(DiscoveryNode node) { + fail("should not open any connections. got [" + node + "]"); + } + }); + + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER); + zenPingA.start(new PingContextProvider() { + @Override + public DiscoveryNodes nodes() { + return DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A").build(); + } + + @Override + public ClusterState clusterState() { + return ClusterState.builder(state).blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)).build(); + } + }); + closeables.push(zenPingA); + + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER); + zenPingB.start(new PingContextProvider() { + @Override + public DiscoveryNodes nodes() { + return DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B").build(); + } + + @Override + public ClusterState clusterState() { + return state; + } + }); + closeables.push(zenPingB); + + Collection pingResponses = zenPingA.pingAndWait().toList(); + assertThat(pingResponses.size(), equalTo(1)); + ZenPing.PingResponse ping = pingResponses.iterator().next(); + assertThat(ping.node().getId(), equalTo("UZP_B")); + assertThat(ping.getClusterStateVersion(), equalTo(state.version())); + + } + + public void testPingingTemporalPings() throws ExecutionException, InterruptedException { + final Settings settings = Settings.builder().put("cluster.name", "test").put(TransportSettings.PORT.getKey(), 0).build(); + + NetworkService networkService = new NetworkService(settings, Collections.emptyList()); + + final BiFunction supplier = (s, v) -> new MockTcpTransport( + s, + threadPool, + BigArrays.NON_RECYCLING_INSTANCE, + new NoneCircuitBreakerService(), + new NamedWriteableRegistry(Collections.emptyList()), + networkService, + v); + + NetworkHandle handleA = startServices(settings, threadPool, "UZP_A", Version.CURRENT, supplier, EnumSet.allOf(Role.class)); + closeables.push(handleA.transportService); + NetworkHandle handleB = startServices(settings, threadPool, "UZP_B", Version.CURRENT, supplier, EnumSet.allOf(Role.class)); + closeables.push(handleB.transportService); + + final Settings hostsSettings = Settings.builder() + .put("cluster.name", "test") + .put("discovery.zen.ping.unicast.hosts", (String) null) // use nodes for simplicity + .build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomPositiveLong()).build(); + + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER); + zenPingA.start(new PingContextProvider() { + @Override + public DiscoveryNodes nodes() { + return DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A").build(); + } + + @Override + public ClusterState clusterState() { + return 
ClusterState.builder(state).blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)).build(); + } + }); + closeables.push(zenPingA); + + // Node B doesn't know about A! + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER); + zenPingB.start(new PingContextProvider() { + @Override + public DiscoveryNodes nodes() { + return DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B").build(); + } + + @Override + public ClusterState clusterState() { + return state; + } + }); + closeables.push(zenPingB); + + { + logger.info("pinging from UZP_A so UZP_B will learn about it"); + Collection pingResponses = zenPingA.pingAndWait().toList(); + assertThat(pingResponses.size(), equalTo(1)); + ZenPing.PingResponse ping = pingResponses.iterator().next(); + assertThat(ping.node().getId(), equalTo("UZP_B")); + assertThat(ping.getClusterStateVersion(), equalTo(state.version())); + } + { + logger.info("pinging from UZP_B"); + Collection pingResponses = zenPingB.pingAndWait().toList(); + assertThat(pingResponses.size(), equalTo(1)); + ZenPing.PingResponse ping = pingResponses.iterator().next(); + assertThat(ping.node().getId(), equalTo("UZP_A")); + assertThat(ping.getClusterStateVersion(), equalTo(-1L)); // A has a block + } + } + public void testInvalidHosts() throws InterruptedException { final Logger logger = mock(Logger.class); final NetworkService networkService = new NetworkService(Settings.EMPTY, Collections.emptyList()); @@ -529,14 +704,13 @@ public class UnicastZenPingTests extends ESTestCase { final TransportService transportService = new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); closeables.push(transportService); - final AtomicInteger idGenerator = new AtomicInteger(); - final List discoveryNodes = UnicastZenPing.resolveDiscoveryNodes( + final List discoveryNodes = TestUnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), 1, transportService, - () -> Integer.toString(idGenerator.incrementAndGet()), + "test_", TimeValue.timeValueSeconds(1)); assertThat(discoveryNodes, hasSize(1)); // only one of the two is valid and will be used assertThat(discoveryNodes.get(0).getAddress().getAddress(), equalTo("127.0.0.1")); @@ -544,24 +718,13 @@ public class UnicastZenPingTests extends ESTestCase { verify(logger).warn(eq("failed to resolve host [127.0.0.1:9300:9300]"), Matchers.any(ExecutionException.class)); } - // assert that we tried to ping each of the configured nodes at least once - private void assertCountersMoreThan(final NetworkHandle that, final NetworkHandle...handles) { - final HashMap moreThan = new HashMap<>(); - for (final NetworkHandle handle : handles) { - assert handle != that; - moreThan.put(handle.address, 0); - } - assertCountersMoreThan(moreThan, that, handles); - } - - private void assertCountersMoreThan( - final Map moreThan, - final NetworkHandle that, - final NetworkHandle... 
handles) { - for (final NetworkHandle handle : handles) { - assert handle != that; - assertThat(that.counters.get(handle.address).get(), greaterThan(moreThan.get(handle.address))); - } + private void assertPingCount(final NetworkHandle fromNode, final NetworkHandle toNode, int expectedCount) { + final AtomicInteger counter = fromNode.counters.getOrDefault(toNode.address, new AtomicInteger()); + final String onNodeName = fromNode.node.getName(); + assertNotNull("handle for [" + onNodeName + "] has no 'expected' counter", counter); + final String forNodeName = toNode.node.getName(); + assertThat("node [" + onNodeName + "] ping count to [" + forNodeName + "] is unexpected", + counter.get(), equalTo(expectedCount)); } private NetworkHandle startServices( @@ -570,31 +733,36 @@ public class UnicastZenPingTests extends ESTestCase { final String nodeId, final Version version, final BiFunction supplier) { - final Transport transport = supplier.apply(settings, version); - final TransportService transportService = - new MockTransportService(settings, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); + return startServices(settings, threadPool, nodeId, version, supplier, emptySet()); + + } + + private NetworkHandle startServices( + final Settings settings, + final ThreadPool threadPool, + final String nodeId, + final Version version, + final BiFunction supplier, + final Set nodeRoles) { + final Settings nodeSettings = Settings.builder().put(settings) + .put("node.name", nodeId) + .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "internal:discovery/zen/unicast") + .build(); + final Transport transport = supplier.apply(nodeSettings, version); + final MockTransportService transportService = + new MockTransportService(nodeSettings, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, null); transportService.start(); transportService.acceptIncomingRequests(); final ConcurrentMap counters = ConcurrentCollections.newConcurrentMap(); - transportService.addConnectionListener(new TransportConnectionListener() { - + transportService.addTracer(new MockTransportService.Tracer() { @Override - public void onNodeConnected(DiscoveryNode node) { - } - - @Override - public void onConnectionOpened(DiscoveryNode node) { + public void requestSent(DiscoveryNode node, long requestId, String action, TransportRequestOptions options) { counters.computeIfAbsent(node.getAddress(), k -> new AtomicInteger()); counters.get(node.getAddress()).incrementAndGet(); } - - @Override - public void onNodeDisconnected(DiscoveryNode node) { - } - }); final DiscoveryNode node = - new DiscoveryNode(nodeId, transportService.boundAddress().publishAddress(), emptyMap(), emptySet(), version); + new DiscoveryNode(nodeId, nodeId, transportService.boundAddress().publishAddress(), emptyMap(), nodeRoles, version); transportService.setLocalNode(node); return new NetworkHandle(transport.boundAddress().publishAddress(), transportService, node, counters); } @@ -616,7 +784,123 @@ public class UnicastZenPingTests extends ESTestCase { this.node = discoveryNode; this.counters = counters; } + } + private static class TestUnicastZenPing extends UnicastZenPing { + + public TestUnicastZenPing(Settings settings, ThreadPool threadPool, NetworkHandle networkHandle, + UnicastHostsProvider unicastHostsProvider) { + super(Settings.builder().put("node.name", networkHandle.node.getName()).put(settings).build(), + threadPool, networkHandle.transportService, unicastHostsProvider); + } + + volatile CountDownLatch 
allTasksCompleted; + volatile AtomicInteger pendingTasks; + + PingCollection pingAndWait() throws ExecutionException, InterruptedException { + allTasksCompleted = new CountDownLatch(1); + pendingTasks = new AtomicInteger(); + // make the three sending rounds to come as started + markTaskAsStarted("send pings"); + markTaskAsStarted("send pings"); + markTaskAsStarted("send pings"); + final CompletableFuture response = new CompletableFuture<>(); + try { + ping(response::complete, TimeValue.timeValueMillis(1), TimeValue.timeValueSeconds(1)); + } catch (Exception ex) { + response.completeExceptionally(ex); + } + return response.get(); + } + + @Override + protected void finishPingingRound(PingingRound pingingRound) { + // wait for all activity to finish before closing + try { + allTasksCompleted.await(); + } catch (InterruptedException e) { + // ok, finish anyway + } + super.finishPingingRound(pingingRound); + } + + @Override + protected void sendPings(TimeValue timeout, PingingRound pingingRound) { + super.sendPings(timeout, pingingRound); + markTaskAsCompleted("send pings"); + } + + @Override + protected void submitToExecutor(AbstractRunnable abstractRunnable) { + markTaskAsStarted("executor runnable"); + super.submitToExecutor(new AbstractRunnable() { + @Override + public void onRejection(Exception e) { + try { + super.onRejection(e); + } finally { + markTaskAsCompleted("executor runnable (rejected)"); + } + } + + @Override + public void onAfter() { + markTaskAsCompleted("executor runnable"); + } + + @Override + protected void doRun() throws Exception { + abstractRunnable.run(); + } + + @Override + public void onFailure(Exception e) { + // we shouldn't really end up here. + throw new AssertionError("unexpected error", e); + } + }); + } + + private void markTaskAsStarted(String task) { + logger.trace("task [{}] started. count [{}]", task, pendingTasks.incrementAndGet()); + } + + private void markTaskAsCompleted(String task) { + final int left = pendingTasks.decrementAndGet(); + logger.trace("task [{}] completed. 
count [{}]", task, left); + if (left == 0) { + allTasksCompleted.countDown(); + } + } + + @Override + protected TransportResponseHandler getPingResponseHandler(PingingRound pingingRound, DiscoveryNode node) { + markTaskAsStarted("ping [" + node + "]"); + TransportResponseHandler original = super.getPingResponseHandler(pingingRound, node); + return new TransportResponseHandler() { + @Override + public UnicastPingResponse newInstance() { + return original.newInstance(); + } + + @Override + public void handleResponse(UnicastPingResponse response) { + original.handleResponse(response); + markTaskAsCompleted("ping [" + node + "]"); + } + + @Override + public void handleException(TransportException exp) { + original.handleException(exp); + markTaskAsCompleted("ping [" + node + "] (error)"); + } + + @Override + public String executor() { + return original.executor(); + } + }; + } } } diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java index 9c70587d0e5..9fa680fc200 100644 --- a/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java +++ b/core/src/test/java/org/elasticsearch/discovery/zen/ZenPingTests.java @@ -68,7 +68,7 @@ public class ZenPingTests extends ESTestCase { Collections.shuffle(pings, random()); ZenPing.PingCollection collection = new ZenPing.PingCollection(); - collection.addPings(pings); + pings.forEach(collection::addPing); List aggregate = collection.toList(); diff --git a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java index aabc58887b2..0b46843cdb7 100644 --- a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java @@ -208,8 +208,8 @@ public class TCPTransportTests extends ESTestCase { @Override public NodeChannels getConnection(DiscoveryNode node) { - return new NodeChannels(node, new Object[ConnectionProfile.LIGHT_PROFILE.getNumConnections()], - ConnectionProfile.LIGHT_PROFILE); + return new NodeChannels(node, new Object[MockTcpTransport.LIGHT_PROFILE.getNumConnections()], + MockTcpTransport.LIGHT_PROFILE); } }; DiscoveryNode node = new DiscoveryNode("foo", buildNewFakeTransportAddress(), Version.CURRENT); diff --git a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java index fd756f6790e..f7d05f22cd5 100644 --- a/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java +++ b/core/src/test/java/org/elasticsearch/transport/TransportServiceHandshakeTests.java @@ -113,7 +113,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { emptyMap(), emptySet(), Version.CURRENT.minimumCompatibilityVersion()); - try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, ConnectionProfile.LIGHT_PROFILE)){ + try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, MockTcpTransport.LIGHT_PROFILE)){ DiscoveryNode connectedNode = handleA.transportService.handshake(connection, timeout); assertNotNull(connectedNode); // the name and version should be updated @@ -121,16 +121,6 @@ public class TransportServiceHandshakeTests extends ESTestCase { assertEquals(connectedNode.getVersion(), handleB.discoveryNode.getVersion()); 
assertFalse(handleA.transportService.nodeConnected(discoveryNode)); } - - DiscoveryNode connectedNode = - handleA.transportService.connectToNodeAndHandshake(discoveryNode, timeout); - assertNotNull(connectedNode); - - // the name and version should be updated - assertEquals(connectedNode.getName(), "TS_B"); - assertEquals(connectedNode.getVersion(), handleB.discoveryNode.getVersion()); - assertTrue(handleA.transportService.nodeConnected(discoveryNode)); - } public void testMismatchedClusterName() { @@ -145,7 +135,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { Version.CURRENT.minimumCompatibilityVersion()); IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, - ConnectionProfile.LIGHT_PROFILE)) { + MockTcpTransport.LIGHT_PROFILE)) { handleA.transportService.handshake(connection, timeout); } }); @@ -166,7 +156,7 @@ public class TransportServiceHandshakeTests extends ESTestCase { Version.CURRENT.minimumCompatibilityVersion()); IllegalStateException ex = expectThrows(IllegalStateException.class, () -> { try (Transport.Connection connection = handleA.transportService.openConnection(discoveryNode, - ConnectionProfile.LIGHT_PROFILE)) { + MockTcpTransport.LIGHT_PROFILE)) { handleA.transportService.handshake(connection, timeout); } }); diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java index 55e2029c8b7..196e98d6582 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java @@ -43,7 +43,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import static org.elasticsearch.discovery.zen.UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT; -import static org.elasticsearch.discovery.zen.UnicastZenPing.resolveDiscoveryNodes; +import static org.elasticsearch.discovery.zen.UnicastZenPing.resolveHostsLists; /** * An implementation of {@link UnicastHostsProvider} that reads hosts/ports @@ -97,13 +97,13 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements Unicast final List discoNodes = new ArrayList<>(); try { - discoNodes.addAll(resolveDiscoveryNodes( + discoNodes.addAll(resolveHostsLists( executorService, logger, hostsList, 1, transportService, - () -> UNICAST_HOST_PREFIX + nodeIdGenerator.incrementAndGet() + "#", + UNICAST_HOST_PREFIX, resolveTimeout)); } catch (InterruptedException e) { throw new RuntimeException(e); diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java index 920792b6c7a..314e195754a 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -33,9 +33,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.MockTcpTransport; import org.elasticsearch.transport.TransportService; import org.junit.After; -import org.junit.AfterClass; import org.junit.Before; -import 
org.junit.BeforeClass; import java.io.BufferedWriter; import java.io.IOException; @@ -44,7 +42,6 @@ import java.nio.file.Path; import java.util.Arrays; import java.util.Collections; import java.util.List; -import java.util.concurrent.Executor; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; @@ -99,13 +96,13 @@ public class FileBasedUnicastHostsProviderTests extends ESTestCase { assertEquals(hostEntries.size() - 1, nodes.size()); // minus 1 because we are ignoring the first line that's a comment assertEquals("192.168.0.1", nodes.get(0).getAddress().getAddress()); assertEquals(9300, nodes.get(0).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "1#", nodes.get(0).getId()); + assertEquals(UNICAST_HOST_PREFIX + "192.168.0.1_0#", nodes.get(0).getId()); assertEquals("192.168.0.2", nodes.get(1).getAddress().getAddress()); assertEquals(9305, nodes.get(1).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "2#", nodes.get(1).getId()); + assertEquals(UNICAST_HOST_PREFIX + "192.168.0.2:9305_0#", nodes.get(1).getId()); assertEquals("255.255.23.15", nodes.get(2).getAddress().getAddress()); assertEquals(9300, nodes.get(2).getAddress().getPort()); - assertEquals(UNICAST_HOST_PREFIX + "3#", nodes.get(2).getId()); + assertEquals(UNICAST_HOST_PREFIX + "255.255.23.15_0#", nodes.get(2).getId()); } public void testEmptyUnicastHostsFile() throws Exception { diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java index fe16f034116..b03e157b01c 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockZenPing.java @@ -28,10 +28,9 @@ import org.elasticsearch.discovery.zen.PingContextProvider; import org.elasticsearch.discovery.zen.ZenPing; import java.util.HashMap; -import java.util.List; import java.util.Map; import java.util.Set; -import java.util.stream.Collectors; +import java.util.function.Consumer; /** * A {@link ZenPing} implementation which returns results based on an static in-memory map. 
This allows pinging @@ -62,7 +61,7 @@ public final class MockZenPing extends AbstractComponent implements ZenPing { } @Override - public void ping(PingListener listener, TimeValue timeout) { + public void ping(Consumer resultsConsumer, TimeValue timeout) { logger.info("pinging using mock zen ping"); synchronized (activeNodesPerCluster) { Set activeNodes = getActiveNodesForCurrentCluster(); @@ -76,11 +75,12 @@ public final class MockZenPing extends AbstractComponent implements ZenPing { activeNodes = getActiveNodesForCurrentCluster(); } lastDiscoveredPings = activeNodes; - List responseList = activeNodes.stream() + PingCollection pingCollection = new PingCollection(); + activeNodes.stream() .filter(p -> p != this) // remove this as pings are not expected to return the local node .map(MockZenPing::getPingResponse) - .collect(Collectors.toList()); - listener.onPing(responseList); + .forEach(pingCollection::addPing); + resultsConsumer.accept(pingCollection); } } diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 540420f35e7..8852b386ffd 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -29,9 +29,7 @@ import org.elasticsearch.Version; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.Loggers; @@ -54,8 +52,6 @@ import org.junit.After; import org.junit.Before; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.io.UncheckedIOException; import java.net.InetAddress; import java.net.InetSocketAddress; @@ -1358,7 +1354,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { // all is well } - try (Transport.Connection connection = serviceB.openConnection(nodeA, ConnectionProfile.LIGHT_PROFILE)){ + try (Transport.Connection connection = serviceB.openConnection(nodeA, MockTcpTransport.LIGHT_PROFILE)){ serviceB.handshake(connection, 100); fail("exception should be thrown"); } catch (IllegalStateException e) { @@ -1416,7 +1412,7 @@ public abstract class AbstractSimpleTransportTestCase extends ESTestCase { // all is well } - try (Transport.Connection connection = serviceB.openConnection(nodeA, ConnectionProfile.LIGHT_PROFILE)){ + try (Transport.Connection connection = serviceB.openConnection(nodeA, MockTcpTransport.LIGHT_PROFILE)){ serviceB.handshake(connection, 100); fail("exception should be thrown"); } catch (IllegalStateException e) { diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java index acda061a704..a2d5f10483f 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java @@ -66,6 +66,23 @@ import 
java.util.function.Consumer; */ public class MockTcpTransport extends TcpTransport { + /** + * A pre-built light connection profile that shares a single connection across all + * types. + */ + public static final ConnectionProfile LIGHT_PROFILE; + + static { + ConnectionProfile.Builder builder = new ConnectionProfile.Builder(); + builder.addConnections(1, + TransportRequestOptions.Type.BULK, + TransportRequestOptions.Type.PING, + TransportRequestOptions.Type.RECOVERY, + TransportRequestOptions.Type.REG, + TransportRequestOptions.Type.STATE); + LIGHT_PROFILE = builder.build(); + } + private final ExecutorService executor; private final Version mockVersion; @@ -159,7 +176,7 @@ public class MockTcpTransport extends TcpTransport @Override protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile) throws IOException { final MockChannel[] mockChannels = new MockChannel[1]; - final NodeChannels nodeChannels = new NodeChannels(node, mockChannels, ConnectionProfile.LIGHT_PROFILE); // we always use light here + final NodeChannels nodeChannels = new NodeChannels(node, mockChannels, LIGHT_PROFILE); // we always use light here boolean success = false; final Socket socket = new Socket(); try { From 3894ec9bae53d37744c2933c9b841982361a675b Mon Sep 17 00:00:00 2001 From: Nic Palmer Date: Wed, 21 Dec 2016 15:48:07 +0000 Subject: [PATCH 15/21] Fixed eu-west-2 entries for discovery-ec2 and repository-s3 also updated the asciidocs --- docs/plugins/discovery-ec2.asciidoc | 1 + docs/plugins/repository-s3.asciidoc | 1 + .../java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java | 2 +- .../java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java | 3 +++ 4 files changed, 6 insertions(+), 1 deletion(-) diff --git a/docs/plugins/discovery-ec2.asciidoc b/docs/plugins/discovery-ec2.asciidoc index 26d4f321899..861a6da575b 100644 --- a/docs/plugins/discovery-ec2.asciidoc +++ b/docs/plugins/discovery-ec2.asciidoc @@ -120,6 +120,7 @@ The available values are: * `ap-northeast` (`ap-northeast-1`) for Asia Pacific (Tokyo) * `ap-northeast-2` (`ap-northeast-2`) for Asia Pacific (Seoul) * `eu-west` (`eu-west-1`) for EU (Ireland) +* `eu-west-2` (`eu-west-2`) for EU (London) * `eu-central` (`eu-central-1`) for EU (Frankfurt) * `sa-east` (`sa-east-1`) for South America (São Paulo) * `cn-north` (`cn-north-1`) for China (Beijing) diff --git a/docs/plugins/repository-s3.asciidoc b/docs/plugins/repository-s3.asciidoc index fcac3105930..351d1daeca8 100644 --- a/docs/plugins/repository-s3.asciidoc +++ b/docs/plugins/repository-s3.asciidoc @@ -125,6 +125,7 @@ The available values are: * `ap-northeast` (`ap-northeast-1`) for Asia Pacific (Tokyo) * `ap-northeast-2` (`ap-northeast-2`) for Asia Pacific (Seoul) * `eu-west` (`eu-west-1`) for EU (Ireland) +* `eu-west-2` (`eu-west-2`) for EU (London) * `eu-central` (`eu-central-1`) for EU (Frankfurt) * `sa-east` (`sa-east-1`) for South America (São Paulo) * `cn-north` (`cn-north-1`) for China (Beijing) diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java index f8dec51f41a..fb76967914d 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/cloud/aws/AwsEc2ServiceImpl.java @@ -210,4 +210,4 @@ public class AwsEc2ServiceImpl extends AbstractComponent implements AwsEc2Servic // Ensure that IdleConnectionReaper is shutdown 
IdleConnectionReaper.shutdown(); } -} +} \ No newline at end of file diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java index 25a8ab403d7..065d571dc35 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/cloud/aws/InternalAwsS3Service.java @@ -202,6 +202,9 @@ public class InternalAwsS3Service extends AbstractLifecycleComponent implements case "eu-west-1": endpoint = "s3-eu-west-1.amazonaws.com"; break; + case "eu-west-2": + endpoint = "s3-eu-west-2.amazonaws.com"; + break; case "eu-central": case "eu-central-1": endpoint = "s3.eu-central-1.amazonaws.com"; From 3b3f9216dbeb03c81e72d11dfdd3361a5d98c8e6 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 21 Dec 2016 16:48:53 +0100 Subject: [PATCH 16/21] Allow terms aggregations on pure boolean scripts. (#22201) The way aggregations on scripts work is by hiding scripts behind the same API that we use for regular fields. However, there is no native support for boolean fields, those need to be exposed as integers, with `0` standing for `false` and `1` for true. Relates #20941 --- .../support/values/ScriptLongValues.java | 26 +++++++++++++------ .../support/ScriptValuesTests.java | 21 +++++++++++++++ 2 files changed, 39 insertions(+), 8 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java index 06bf998d8da..503cec39b8d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java @@ -50,15 +50,10 @@ public class ScriptLongValues extends SortingNumericDocValues implements ScorerA resize(0); } - else if (value instanceof Number) { - resize(1); - values[0] = ((Number) value).longValue(); - } - else if (value.getClass().isArray()) { resize(Array.getLength(value)); for (int i = 0; i < count(); ++i) { - values[i] = ((Number) Array.get(value, i)).longValue(); + values[i] = toLongValue(Array.get(value, i)); } } @@ -66,18 +61,33 @@ public class ScriptLongValues extends SortingNumericDocValues implements ScorerA resize(((Collection) value).size()); int i = 0; for (Iterator it = ((Collection) value).iterator(); it.hasNext(); ++i) { - values[i] = ((Number) it.next()).longValue(); + values[i] = toLongValue(it.next()); } assert i == count(); } else { - throw new AggregationExecutionException("Unsupported script value [" + value + "]"); + resize(1); + values[0] = toLongValue(value); } sort(); } + private static long toLongValue(Object o) { + if (o instanceof Number) { + return ((Number) o).longValue(); + } else if (o instanceof Boolean) { + // We do expose boolean fields as boolean in scripts, however aggregations still expect + // that scripts return the same internal representation as regular fields, so boolean + // values in scripts need to be converted to a number, and the value formatter will + // make sure of using true/false in the key_as_string field + return ((Boolean) o).booleanValue() ? 
1L : 0L; + } else { + throw new AggregationExecutionException("Unsupported script value [" + o + "], expected a number"); + } + } + @Override public void setScorer(Scorer scorer) { script.setScorer(scorer); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java index dcf6cfdb0ba..f338e961312 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java @@ -108,6 +108,27 @@ public class ScriptValuesTests extends ESTestCase { } } + public void testBooleans() { + final Object[][] values = new Boolean[randomInt(10)][]; + for (int i = 0; i < values.length; ++i) { + Boolean[] booleans = new Boolean[randomInt(8)]; + for (int j = 0; j < booleans.length; ++j) { + booleans[j] = randomBoolean(); + } + Arrays.sort(booleans); + values[i] = booleans; + } + FakeSearchScript script = new FakeSearchScript(values); + ScriptLongValues scriptValues = new ScriptLongValues(script); + for (int i = 0; i < values.length; ++i) { + scriptValues.setDocument(i); + assertEquals(values[i].length, scriptValues.count()); + for (int j = 0; j < values[i].length; ++j) { + assertEquals(values[i][j], scriptValues.valueAt(j) == 1L); + } + } + } + public void testDoubles() { final Object[][] values = new Double[randomInt(10)][]; for (int i = 0; i < values.length; ++i) { From 31bf279ec7bdc96eced2b2509a75285ecd586314 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Wed, 21 Dec 2016 10:49:33 -0500 Subject: [PATCH 17/21] Tell Netty not to be unsafe in transport client Today we ship with default jvm.options for server Elasticsearch that prevents Netty from using some unsafe optimizations. Yet, the settings do nothing for the transport client since it is embedded in other applications that will not read and use those settings. This commit adds these settings for the transport client, and is done so in a way that still enables users to go unsafe if they want to go unsafe (they shouldn't, but the option is there). 
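For illustration only (not part of this change), an application that embeds the transport client and does want Netty's unsafe optimizations can set the same two system properties to `false` before the `PreBuiltTransportClient` class is loaded, so that `initializeNetty()` below sees them as already set. The property names are the ones used by this change; the class and method names in the sketch are made up:

    // Hypothetical sketch: opt back in to Netty's unsafe optimizations by setting
    // the properties before the transport client class (and its static initializer) loads.
    public class NettyUnsafeOptIn {
        public static void main(String[] args) {
            System.setProperty("io.netty.noUnsafe", "false");             // let Netty use unsafe again
            System.setProperty("io.netty.noKeySetOptimization", "false"); // let Netty replace the selector key set
            // ... only now construct the PreBuiltTransportClient ...
        }
    }

The same effect can be had from the command line, e.g. -Dio.netty.noUnsafe=false -Dio.netty.noKeySetOptimization=false.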
Relates #22284 --- .../client/PreBuiltTransportClient.java | 27 +++++++++++++++++++ .../client/PreBuiltTransportClientTests.java | 2 +- 2 files changed, 28 insertions(+), 1 deletion(-) diff --git a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java index ca90723ae82..a173d43a96c 100644 --- a/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java +++ b/client/transport/src/main/java/org/elasticsearch/transport/client/PreBuiltTransportClient.java @@ -22,6 +22,7 @@ package org.elasticsearch.transport.client; import io.netty.util.ThreadDeathWatcher; import io.netty.util.concurrent.GlobalEventExecutor; import org.elasticsearch.client.transport.TransportClient; +import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.reindex.ReindexPlugin; @@ -47,6 +48,32 @@ import java.util.concurrent.TimeUnit; @SuppressWarnings({"unchecked","varargs"}) public class PreBuiltTransportClient extends TransportClient { + static { + // initialize Netty system properties before triggering any Netty class loads + initializeNetty(); + } + + /** + * Netty wants to do some unsafe things like use unsafe and replace a private field. This method disables these things by default, but + * can be overridden by setting the corresponding system properties. + */ + @SuppressForbidden(reason = "set system properties to configure Netty") + private static void initializeNetty() { + final String noUnsafeKey = "io.netty.noUnsafe"; + final String noUnsafe = System.getProperty(noUnsafeKey); + if (noUnsafe == null) { + // disable Netty from using unsafe + System.setProperty(noUnsafeKey, Boolean.toString(true)); + } + + final String noKeySetOptimizationKey = "io.netty.noKeySetOptimization"; + final String noKeySetOptimization = System.getProperty(noKeySetOptimizationKey); + if (noKeySetOptimization == null) { + // disable Netty from replacing the selector key set + System.setProperty(noKeySetOptimizationKey, Boolean.toString(true)); + } + } + private static final Collection> PRE_INSTALLED_PLUGINS = Collections.unmodifiableList( Arrays.asList( diff --git a/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java b/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java index a1d95b68af7..dd628fad31f 100644 --- a/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java +++ b/client/transport/src/test/java/org/elasticsearch/transport/client/PreBuiltTransportClientTests.java @@ -41,7 +41,7 @@ public class PreBuiltTransportClientTests extends RandomizedTest { @Test public void testPluginInstalled() { - // TODO: remove when Netty 4.1.5 is upgraded to Netty 4.1.6 including https://github.com/netty/netty/pull/5778 + // TODO: remove when Netty 4.1.6 is upgraded to Netty 4.1.7 including https://github.com/netty/netty/pull/6068 assumeFalse(Constants.JRE_IS_MINIMUM_JAVA9); try (TransportClient client = new PreBuiltTransportClient(Settings.EMPTY)) { Settings settings = client.settings(); From 84edf36f11da2d9c416a14ae2420c56a32823a38 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Wed, 21 Dec 2016 16:51:45 +0100 Subject: [PATCH 18/21] Make `-0` compare less than `+0` consistently. 

From 84edf36f11da2d9c416a14ae2420c56a32823a38 Mon Sep 17 00:00:00 2001
From: Adrien Grand
Date: Wed, 21 Dec 2016 16:51:45 +0100
Subject: [PATCH 18/21] Make `-0` compare less than `+0` consistently.
 (#22173)

Our `float`/`double` fields generally assume that `-0` compares less
than `+0`, except when bounds are exclusive: an exclusive lower bound
on `-0` excludes `+0` and an exclusive upper bound on `+0` excludes
`-0`.

Closes #22167
---
 .../index/mapper/NumberFieldMapper.java       | 86 +++++++++++++++++--
 .../index/mapper/NumberFieldTypeTests.java    | 16 ++++
 docs/reference/mapping/types/numeric.asciidoc |  6 ++
 3 files changed, 101 insertions(+), 7 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
index 9e615e10169..33a6d481ae4 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java
@@ -186,6 +186,30 @@ public class NumberFieldMapper extends FieldMapper {
             return HalfFloatPoint.newSetQuery(field, v);
         }
 
+        private float nextDown(float f) {
+            // HalfFloatPoint.nextDown considers that -0 is the same as +0
+            // while point ranges are consistent with Float.compare, so
+            // they consider that -0 < +0, so we explicitly make sure
+            // that nextDown(+0) returns -0
+            if (Float.floatToIntBits(f) == Float.floatToIntBits(0f)) {
+                return -0f;
+            } else {
+                return HalfFloatPoint.nextDown(f);
+            }
+        }
+
+        private float nextUp(float f) {
+            // HalfFloatPoint.nextUp considers that -0 is the same as +0
+            // while point ranges are consistent with Float.compare, so
+            // they consider that -0 < +0, so we explicitly make sure
+            // that nextUp(-0) returns +0
+            if (Float.floatToIntBits(f) == Float.floatToIntBits(-0f)) {
+                return +0f;
+            } else {
+                return HalfFloatPoint.nextUp(f);
+            }
+        }
+
         @Override
         Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                          boolean includeLower, boolean includeUpper) {
@@ -194,16 +218,16 @@
             if (lowerTerm != null) {
                 l = parse(lowerTerm);
                 if (includeLower) {
-                    l = Math.nextDown(l);
+                    l = nextDown(l);
                 }
                 l = HalfFloatPoint.nextUp(l);
             }
             if (upperTerm != null) {
                 u = parse(upperTerm);
                 if (includeUpper) {
-                    u = Math.nextUp(u);
+                    u = nextUp(u);
                 }
-                u = HalfFloatPoint.nextDown(u);
+                u = nextDown(u);
             }
             return HalfFloatPoint.newRangeQuery(field, l, u);
         }
@@ -276,6 +300,30 @@ public class NumberFieldMapper extends FieldMapper {
             return FloatPoint.newSetQuery(field, v);
         }
 
+        private float nextDown(float f) {
+            // Math.nextDown considers that -0 is the same as +0
+            // while point ranges are consistent with Float.compare, so
+            // they consider that -0 < +0, so we explicitly make sure
+            // that nextDown(+0) returns -0
+            if (Float.floatToIntBits(f) == Float.floatToIntBits(0f)) {
+                return -0f;
+            } else {
+                return Math.nextDown(f);
+            }
+        }
+
+        private float nextUp(float f) {
+            // Math.nextUp considers that -0 is the same as +0
+            // while point ranges are consistent with Float.compare, so
+            // they consider that -0 < +0, so we explicitly make sure
+            // that nextUp(-0) returns +0
+            if (Float.floatToIntBits(f) == Float.floatToIntBits(-0f)) {
+                return +0f;
+            } else {
+                return Math.nextUp(f);
+            }
+        }
+
         @Override
         Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                          boolean includeLower, boolean includeUpper) {
@@ -284,13 +332,13 @@ public class NumberFieldMapper extends FieldMapper {
             if (lowerTerm != null) {
                 l = parse(lowerTerm);
                 if (includeLower == false) {
-                    l = Math.nextUp(l);
+                    l = nextUp(l);
                 }
             }
             if (upperTerm != null) {
                 u = parse(upperTerm);
                 if (includeUpper == false) {
-                    u = Math.nextDown(u);
+                    u = nextDown(u);
                 }
             }
             return FloatPoint.newRangeQuery(field, l, u);
@@ -364,6 +412,30 @@ public class NumberFieldMapper extends FieldMapper {
             return DoublePoint.newSetQuery(field, v);
         }
 
+        private double nextDown(double d) {
+            // Math.nextDown considers that -0 is the same as +0
+            // while point ranges are consistent with Double.compare, so
+            // they consider that -0 < +0, so we explicitly make sure
+            // that nextDown(+0) returns -0
+            if (Double.doubleToLongBits(d) == Double.doubleToLongBits(0d)) {
+                return -0d;
+            } else {
+                return Math.nextDown(d);
+            }
+        }
+
+        private double nextUp(double d) {
+            // Math.nextUp considers that -0 is the same as +0
+            // while point ranges are consistent with Double.compare, so
+            // they consider that -0 < +0, so we explicitly make sure
+            // that nextUp(-0) returns +0
+            if (Double.doubleToLongBits(d) == Double.doubleToLongBits(-0d)) {
+                return +0d;
+            } else {
+                return Math.nextUp(d);
+            }
+        }
+
         @Override
         Query rangeQuery(String field, Object lowerTerm, Object upperTerm,
                          boolean includeLower, boolean includeUpper) {
@@ -372,13 +444,13 @@
             if (lowerTerm != null) {
                 l = parse(lowerTerm);
                 if (includeLower == false) {
-                    l = Math.nextUp(l);
+                    l = nextUp(l);
                 }
             }
             if (upperTerm != null) {
                 u = parse(upperTerm);
                 if (includeUpper == false) {
-                    u = Math.nextDown(u);
+                    u = nextDown(u);
                 }
             }
             return DoublePoint.newRangeQuery(field, l, u);
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
index d7e178404f1..19f0936de54 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java
@@ -149,4 +149,20 @@ public class NumberFieldTypeTests extends FieldTypeTestCase {
         }
         IOUtils.close(reader, dir);
     }
+
+    public void testNegativeZero() {
+        assertEquals(
+                NumberType.DOUBLE.rangeQuery("field", null, -0d, true, true),
+                NumberType.DOUBLE.rangeQuery("field", null, +0d, true, false));
+        assertEquals(
+                NumberType.FLOAT.rangeQuery("field", null, -0f, true, true),
+                NumberType.FLOAT.rangeQuery("field", null, +0f, true, false));
+        assertEquals(
+                NumberType.HALF_FLOAT.rangeQuery("field", null, -0f, true, true),
+                NumberType.HALF_FLOAT.rangeQuery("field", null, +0f, true, false));
+
+        assertFalse(NumberType.DOUBLE.termQuery("field", -0d).equals(NumberType.DOUBLE.termQuery("field", +0d)));
+        assertFalse(NumberType.FLOAT.termQuery("field", -0f).equals(NumberType.FLOAT.termQuery("field", +0f)));
+        assertFalse(NumberType.HALF_FLOAT.termQuery("field", -0f).equals(NumberType.HALF_FLOAT.termQuery("field", +0f)));
+    }
 }
diff --git a/docs/reference/mapping/types/numeric.asciidoc b/docs/reference/mapping/types/numeric.asciidoc
index 6fdc0c806a4..48a74cd1760 100644
--- a/docs/reference/mapping/types/numeric.asciidoc
+++ b/docs/reference/mapping/types/numeric.asciidoc
@@ -39,6 +39,12 @@ PUT my_index
 --------------------------------------------------
 // CONSOLE
 
+NOTE: The `double`, `float` and `half_float` types consider that `-0.0` and
+`+0.0` are different values. As a consequence, doing a `term` query on
+`-0.0` will not match `+0.0` and vice-versa. Same is true for range queries:
+if the upper bound is `-0.0` then `+0.0` will not match, and if the lower
+bound is `+0.0` then `-0.0` will not match.
+
 ==== Which type should I use?
 
 As far as integer types (`byte`, `short`, `integer` and `long`) are concerned,
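
The reason this patch introduces its own `nextDown`/`nextUp` helpers instead of calling
`Math.nextDown`/`Math.nextUp` directly: the JDK methods treat the two zeros as a single
value and step past both of them, while `Float.compare` (and the Lucene point ranges built
on that ordering) put `-0.0` strictly before `+0.0`. A small, self-contained sketch of the
discrepancy using only the JDK (nothing below comes from the patch itself):

    // Why exclusive bounds around zero need special handling.
    public class SignedZeroDemo {
        public static void main(String[] args) {
            // Float.compare orders -0.0f strictly before +0.0f ...
            System.out.println(Float.compare(-0.0f, +0.0f));              // negative (-1)
            // ... but Math.nextDown(+0.0f) skips -0.0f and returns -Float.MIN_VALUE,
            // and Math.nextUp(-0.0f) skips +0.0f and returns Float.MIN_VALUE.
            System.out.println(Math.nextDown(+0.0f) == -Float.MIN_VALUE); // true
            System.out.println(Math.nextUp(-0.0f) == Float.MIN_VALUE);    // true
            // The helpers above instead map +0.0 down to -0.0 (and -0.0 up to +0.0),
            // keeping exclusive range bounds consistent with the -0 < +0 ordering.
        }
    }
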

From 5c8232c03b8c7c0edbea74d8139f441083a6d439 Mon Sep 17 00:00:00 2001
From: Luca Cavanna
Date: Wed, 21 Dec 2016 16:56:55 +0100
Subject: [PATCH 19/21] Restore deprecation warning for invalid
 match_mapping_type values (#22304)

The deprecation warning now gives the same message as 5.x. It was
previously removed, but given that we are still lenient with old
indices we should still output the warning.
---
 .../java/org/elasticsearch/index/mapper/DynamicTemplate.java | 4 +++-
 .../org/elasticsearch/index/mapper/DynamicTemplateTests.java | 5 +++--
 2 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java b/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java
index dcb991095b2..c31306c7fa3 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/DynamicTemplate.java
@@ -152,7 +152,7 @@ public class DynamicTemplate implements ToXContent {
                 return v;
             }
         }
-        throw new IllegalArgumentException("No xcontent type matched on [" + value + "], possible values are "
+        throw new IllegalArgumentException("No field type matched on [" + value + "], possible values are "
                 + Arrays.toString(values()));
     }
 
@@ -208,6 +208,8 @@ public class DynamicTemplate implements ToXContent {
             if (indexVersionCreated.onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) {
                 throw e;
             } else {
+                DEPRECATION_LOGGER.deprecated("match_mapping_type [" + matchMappingType + "] is invalid and will be ignored: "
+                        + e.getMessage());
                 // this template is on an unknown type so it will never match anything
                 // null indicates that the template should be ignored
                 return null;
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java
index 24023281f5e..42dd8c3bf37 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java
@@ -50,14 +50,15 @@ public class DynamicTemplateTests extends ESTestCase {
         templateDef.put("mapping", Collections.singletonMap("store", true));
         // if a wrong match type is specified, we ignore the template
         assertNull(DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5));
-
+        assertWarnings("match_mapping_type [short] is invalid and will be ignored: No field type matched on [short], " +
+                "possible values are [object, string, long, double, boolean, date, binary]");
         Map<String, Object> templateDef2 = new HashMap<>();
         templateDef2.put("match_mapping_type", "text");
         templateDef2.put("mapping", Collections.singletonMap("store", true));
         // if a wrong match type is specified, we ignore the template
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
                 () -> DynamicTemplate.parse("my_template", templateDef2, Version.V_6_0_0_alpha1_UNRELEASED));
-        assertEquals("No xcontent type matched on [text], possible values are [object, string, long, double, boolean, date, binary]",
+        assertEquals("No field type matched on [text], possible values are [object, string, long, double, boolean, date, binary]",
                 e.getMessage());
     }

From dec6fc2d405d3efc5729e1bb7885fce10a62a477 Mon Sep 17 00:00:00 2001
From: Francesc Gil
Date: Wed, 21 Dec 2016 17:31:21 +0100
Subject: [PATCH 20/21] Repeated language analyzers (#22240)

* Repeated language analyzers

  The `catalan` analyzer was repeated on the supported list :)

* Reordered the languages into alphabetical order

* Added a space for formatting

* Reordered the languages and removed the repeated entry
---
 docs/reference/analysis/analyzers/lang-analyzer.asciidoc | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc
index 983a719e4f4..02843789c4b 100644
--- a/docs/reference/analysis/analyzers/lang-analyzer.asciidoc
+++ b/docs/reference/analysis/analyzers/lang-analyzer.asciidoc
@@ -55,9 +55,9 @@ functionality is implemented by adding the
 with the `keywords` set to the value of the `stem_exclusion` parameter.
 
 The following analyzers support setting custom `stem_exclusion` list:
-`arabic`, `armenian`, `basque`, `catalan`, `bulgarian`, `catalan`,
-`czech`, `finnish`, `dutch`, `english`, `finnish`, `french`, `galician`,
-`german`, `irish`, `hindi`, `hungarian`, `indonesian`, `italian`, `latvian`,
+`arabic`, `armenian`, `basque`, `bulgarian`, `catalan`, `czech`,
+`dutch`, `english`, `finnish`, `french`, `galician`,
+`german`, `hindi`, `hungarian`, `indonesian`, `irish`, `italian`, `latvian`,
 `lithuanian`, `norwegian`, `portuguese`, `romanian`, `russian`, `sorani`,
 `spanish`, `swedish`, `turkish`.
 

From 8aca504c863d2351258e33e86b2c18e9365bbf99 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Wed, 21 Dec 2016 13:39:28 -0500
Subject: [PATCH 21/21] Clear static variable after suite

This was causing test failures:
https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+master+java9-periodic/1101/console
https://elasticsearch-ci.elastic.co/job/elastic+elasticsearch+master+dockeralpine-periodic/513/consoleFull
---
 .../elasticsearch/index/query/QueryParseContextTests.java | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java
index 1e00176f558..c509468a1a7 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryParseContextTests.java
@@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.search.SearchModule;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.After;
+import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 
@@ -45,6 +46,11 @@ public class QueryParseContextTests extends ESTestCase {
         xContentRegistry = new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, emptyList()).getNamedXContents());
     }
 
+    @AfterClass
+    public static void cleanup() {
+        xContentRegistry = null;
+    }
+
     private ThreadContext threadContext;
 
     @Before
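
The cleanup added here is an instance of a general pattern: static state created in a
@BeforeClass method has to be released in a matching @AfterClass method, otherwise the
static field keeps its object graph reachable after the suite finishes and the test
framework's static-leak checks fail. A generic, self-contained sketch of the pattern
(the class, field and resource names are illustrative, not taken from this patch):

    import org.junit.AfterClass;
    import org.junit.Assert;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class StaticCleanupExampleTests {
        // stand-in for an expensive, suite-scoped resource (e.g. a registry built once per suite)
        static final class Registry {}

        private static Registry registry;

        @BeforeClass
        public static void init() {
            registry = new Registry();   // created once for the whole suite
        }

        @AfterClass
        public static void cleanup() {
            registry = null;             // drop the static reference so it can be garbage collected
        }

        @Test
        public void usesRegistry() {
            Assert.assertNotNull(registry);
        }
    }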