From 4632661bc71bb22fc577df476e70e9dfabaaae66 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Tue, 18 Apr 2017 15:17:21 +0200 Subject: [PATCH 01/19] Upgrade to a Lucene 7 snapshot (#24089) We want to upgrade to Lucene 7 ahead of time in order to be able to check whether it causes any trouble to Elasticsearch before Lucene 7.0 gets released. From a user perspective, the main benefit of this upgrade is the enhanced support for sparse fields, whose resource consumption is now function of the number of docs that have a value rather than the total number of docs in the index. Some notes about the change: - it includes the deprecation of the `disable_coord` parameter of the `bool` and `common_terms` queries: Lucene has removed support for coord factors - it includes the deprecation of the `index.similarity.base` expert setting, since it was only useful to configure coords and query norms, which have both been removed - two tests have been marked with `@AwaitsFix` because of #23966, which we intend to address after the merge --- .../resources/forbidden/es-all-signatures.txt | 6 - .../forbidden/es-core-signatures.txt | 10 - buildSrc/version.properties | 2 +- .../lucene-analyzers-common-6.5.0.jar.sha1 | 1 - ...ers-common-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene-backward-codecs-6.5.0.jar.sha1 | 1 - ...ard-codecs-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-core-6.5.0.jar.sha1 | 1 - ...ucene-core-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-grouping-6.5.0.jar.sha1 | 1 - ...e-grouping-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene-highlighter-6.5.0.jar.sha1 | 1 - ...ighlighter-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-join-6.5.0.jar.sha1 | 1 - ...ucene-join-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-memory-6.5.0.jar.sha1 | 1 - ...ene-memory-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-misc-6.5.0.jar.sha1 | 1 - ...ucene-misc-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + 
core/licenses/lucene-queries-6.5.0.jar.sha1 | 1 - ...ne-queries-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene-queryparser-6.5.0.jar.sha1 | 1 - ...ueryparser-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-sandbox-6.5.0.jar.sha1 | 1 - ...ne-sandbox-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-spatial-6.5.0.jar.sha1 | 1 - ...ne-spatial-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene-spatial-extras-6.5.0.jar.sha1 | 1 - ...ial-extras-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-spatial3d-6.5.0.jar.sha1 | 1 - ...-spatial3d-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + core/licenses/lucene-suggest-6.5.0.jar.sha1 | 1 - ...ne-suggest-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene/queries/BlendedTermQuery.java | 13 +- .../queries/ExtendedCommonTermsQuery.java | 4 +- .../apache/lucene/queries/MinDocQuery.java | 4 +- .../classic/MapperQueryParser.java | 36 +- .../grouping/CollapsingDocValuesSource.java | 151 ++- .../grouping/CollapsingTopDocsCollector.java | 67 +- .../main/java/org/elasticsearch/Version.java | 2 +- .../common/geo/GeoHashUtils.java | 54 +- .../elasticsearch/common/geo/GeoPoint.java | 7 +- .../elasticsearch/common/geo/GeoUtils.java | 40 +- .../elasticsearch/common/lucene/Lucene.java | 8 +- .../common/lucene/ShardCoreKeyMap.java | 26 +- .../common/lucene/all/AllTermQuery.java | 16 +- .../index/ElasticsearchDirectoryReader.java | 21 +- .../lucene/index/ElasticsearchLeafReader.java | 9 +- .../common/lucene/search/Queries.java | 7 +- .../function/FieldValueFactorFunction.java | 20 +- .../function/FiltersFunctionScoreQuery.java | 18 +- .../search/function/FunctionScoreQuery.java | 16 +- .../search/function/LeafScoreFunction.java | 2 +- .../search/function/RandomScoreFunction.java | 11 +- .../search/function/WeightFactorFunction.java | 2 +- .../uid/PerThreadIDVersionAndSeqNoLookup.java | 52 +- .../lucene/uid/VersionsAndSeqNoResolver.java | 13 +- .../index/cache/bitset/BitsetFilterCache.java | 22 +- 
.../index/codec/CodecService.java | 6 +- .../PerFieldMappingPostingFormatCodec.java | 4 +- .../fielddata/AbstractBinaryDocValues.java | 55 ++ .../fielddata/AbstractNumericDocValues.java | 50 + ...Ords.java => AbstractSortedDocValues.java} | 34 +- .../AbstractSortedNumericDocValues.java | 55 ++ .../fielddata/AbstractSortedSetDocValues.java | 56 ++ .../AbstractSortingNumericDocValues.java | 54 ++ .../fielddata/AtomicOrdinalsFieldData.java | 4 +- .../index/fielddata/FieldData.java | 420 ++++---- .../index/fielddata/GeoPointValues.java | 16 +- .../index/fielddata/MultiGeoPointValues.java | 22 +- .../index/fielddata/NumericDoubleValues.java | 53 +- .../index/fielddata/ScriptDocValues.java | 324 +++++-- .../SingletonMultiGeoPointValues.java | 34 +- .../SingletonSortedBinaryDocValues.java | 32 +- .../SingletonSortedNumericDoubleValues.java | 60 +- .../SortableLongBitsNumericDocValues.java | 20 +- ...ortableLongBitsSortedNumericDocValues.java | 16 +- ...SortableLongBitsToNumericDoubleValues.java | 11 +- ...leLongBitsToSortedNumericDoubleValues.java | 14 +- .../fielddata/SortedBinaryDocValues.java | 25 +- .../fielddata/SortedNumericDoubleValues.java | 32 +- .../fielddata/SortingBinaryDocValues.java | 9 +- .../fielddata/SortingNumericDocValues.java | 11 +- .../fielddata/SortingNumericDoubleValues.java | 13 +- .../BytesRefFieldComparatorSource.java | 49 +- .../ordinals/GlobalOrdinalMapping.java | 55 +- .../ordinals/GlobalOrdinalsBuilder.java | 10 +- .../InternalGlobalOrdinalsIndexFieldData.java | 12 +- .../fielddata/ordinals/MultiOrdinals.java | 65 +- .../index/fielddata/ordinals/Ordinals.java | 6 +- .../fielddata/ordinals/OrdinalsBuilder.java | 59 -- .../ordinals/SinglePackedOrdinals.java | 30 +- .../AbstractAtomicGeoPointFieldData.java | 2 +- .../AbstractAtomicOrdinalsFieldData.java | 12 +- .../AbstractAtomicParentChildFieldData.java | 30 +- .../AbstractGeoPointDVIndexFieldData.java | 83 -- .../plain/AbstractIndexGeoPointFieldData.java | 117 --- 
.../AbstractLatLonPointDVIndexFieldData.java | 18 +- .../plain/AtomicDoubleFieldData.java | 2 +- .../plain/BinaryDVAtomicFieldData.java | 4 +- .../plain/BytesBinaryDVAtomicFieldData.java | 45 +- .../plain/DocValuesIndexFieldData.java | 6 +- .../plain/GeoPointDVAtomicFieldData.java | 90 -- .../fielddata/plain/IndexIndexFieldData.java | 25 +- .../plain/LatLonPointDVAtomicFieldData.java | 63 +- .../plain/PagedBytesAtomicFieldData.java | 4 +- .../plain/ParentChildIndexFieldData.java | 32 +- .../plain/SortedNumericDVIndexFieldData.java | 62 +- .../SortedSetDVBytesAtomicFieldData.java | 9 +- .../SortedSetDVOrdinalsIndexFieldData.java | 8 +- .../index/mapper/CustomDocValuesField.java | 5 - .../index/mapper/DocumentMapper.java | 2 +- .../index/mapper/FieldMapper.java | 14 - .../index/mapper/GeoShapeFieldMapper.java | 5 - .../index/mapper/IpFieldMapper.java | 37 +- .../index/mapper/ScaledFloatFieldMapper.java | 24 +- .../index/query/BoolQueryBuilder.java | 40 +- .../index/query/CommonTermsQueryBuilder.java | 43 +- .../index/query/ScriptQueryBuilder.java | 28 +- .../index/query/SimpleQueryParser.java | 6 +- .../functionscore/DecayFunctionBuilder.java | 57 +- .../search/ESToParentBlockJoinQuery.java | 4 +- .../index/search/MatchQuery.java | 2 +- .../index/search/MultiMatchQuery.java | 5 +- .../index/shard/FilterDocValuesProducer.java | 157 --- .../index/shard/IndexSearcherWrapper.java | 12 +- .../index/similarity/SimilarityService.java | 16 +- .../indices/IndicesQueryCache.java | 10 - .../indices/IndicesRequestCache.java | 7 +- .../cache/IndicesFieldDataCache.java | 33 +- .../elasticsearch/search/MultiValueMode.java | 917 +++++++++--------- .../children/ParentToChildrenAggregator.java | 47 +- .../geogrid/GeoGridAggregationBuilder.java | 23 +- .../bucket/geogrid/GeoHashGridAggregator.java | 27 +- .../histogram/DateHistogramAggregator.java | 35 +- .../bucket/histogram/HistogramAggregator.java | 35 +- .../bucket/range/BinaryRangeAggregator.java | 24 +- 
.../bucket/range/RangeAggregator.java | 11 +- ...DiversifiedBytesHashSamplerAggregator.java | 38 +- .../DiversifiedMapSamplerAggregator.java | 46 +- .../DiversifiedNumericSamplerAggregator.java | 32 +- .../DiversifiedOrdinalsSamplerAggregator.java | 59 +- ...balOrdinalsSignificantTermsAggregator.java | 22 +- .../GlobalOrdinalsStringTermsAggregator.java | 119 +-- .../bucket/terms/LongTermsAggregator.java | 44 +- .../bucket/terms/StringTermsAggregator.java | 53 +- .../bucket/terms/support/IncludeExclude.java | 13 +- .../metrics/avg/AvgAggregator.java | 15 +- .../cardinality/CardinalityAggregator.java | 72 +- .../geobounds/GeoBoundsAggregator.java | 67 +- .../geocentroid/GeoCentroidAggregator.java | 23 +- .../geocentroid/InternalGeoCentroid.java | 18 +- .../metrics/max/MaxAggregator.java | 10 +- .../metrics/min/MinAggregator.java | 10 +- .../hdr/AbstractHDRPercentilesAggregator.java | 9 +- .../AbstractTDigestPercentilesAggregator.java | 9 +- .../metrics/stats/StatsAggregator.java | 29 +- .../extended/ExtendedStatsAggregator.java | 35 +- .../metrics/sum/SumAggregator.java | 13 +- .../valuecount/ValueCountAggregator.java | 5 +- .../aggregations/support/MissingValues.java | 207 ++-- .../aggregations/support/ValuesSource.java | 153 ++- .../support/values/ScriptBytesValues.java | 15 +- .../support/values/ScriptDoubleValues.java | 26 +- .../support/values/ScriptLongValues.java | 30 +- .../search/fetch/FetchPhase.java | 44 +- .../search/fetch/FetchSubPhase.java | 5 +- .../subphase/DocValueFieldsFetchSubPhase.java | 3 +- .../fetch/subphase/InnerHitsContext.java | 9 +- .../subphase/ParentFieldSubFetchPhase.java | 4 +- .../fetch/subphase/VersionFetchSubPhase.java | 4 +- .../search/internal/ContextIndexSearcher.java | 6 +- .../search/lookup/LeafDocLookup.java | 8 +- .../search/profile/query/ProfileWeight.java | 10 - .../search/slice/DocValuesSliceQuery.java | 40 +- .../search/slice/TermsSliceQuery.java | 4 +- .../search/sort/ScriptSortBuilder.java | 23 +- 
.../completion/CompletionSuggester.java | 4 +- .../elasticsearch/bootstrap/security.policy | 4 +- .../bootstrap/test-framework.policy | 2 +- .../lucene/queries/BlendedTermQueryTests.java | 12 +- .../common/lucene/LuceneTests.java | 3 +- .../common/lucene/ShardCoreKeyMapTests.java | 4 +- .../lucene/index/ESDirectoryReaderTests.java | 3 +- .../deps/lucene/SimpleLuceneTests.java | 116 +-- .../elasticsearch/index/IndexModuleTests.java | 4 +- .../elasticsearch/index/codec/CodecTests.java | 10 +- .../index/engine/InternalEngineTests.java | 5 +- .../AbstractFieldDataImplTestCase.java | 48 +- .../AbstractGeoFieldDataTestCase.java | 21 +- .../AbstractStringFieldDataTestCase.java | 60 +- .../fielddata/BinaryDVFieldDataTests.java | 26 +- .../index/fielddata/FieldDataTests.java | 83 +- .../index/fielddata/FilterFieldDataTests.java | 12 +- .../fielddata/ParentChildFieldDataTests.java | 57 +- .../fielddata/ScriptDocValuesDatesTests.java | 18 +- .../ScriptDocValuesGeoPointsTests.java | 17 +- .../fielddata/ScriptDocValuesLongsTests.java | 20 +- .../fieldcomparator/ReplaceMissingTests.java | 49 +- .../ordinals/MultiOrdinalsTests.java | 48 +- .../ordinals/SingleOrdinalsTests.java | 7 +- .../plain/HalfFloatFielddataTests.java | 14 +- .../index/mapper/BooleanFieldMapperTests.java | 6 +- .../index/mapper/FakeStringFieldMapper.java | 5 - .../mapper/ScaledFloatFieldTypeTests.java | 14 +- .../index/query/BoolQueryBuilderTests.java | 8 +- .../query/CommonTermsQueryBuilderTests.java | 8 +- .../query/MultiMatchQueryBuilderTests.java | 2 - .../index/query/NestedQueryBuilderTests.java | 1 - .../query/QueryStringQueryBuilderTests.java | 3 - .../index/query/SimpleQueryParserTests.java | 1 - .../query/SimpleQueryStringBuilderTests.java | 16 +- .../functionscore/FunctionScoreTests.java | 20 +- .../query/plugin/DummyQueryParserPlugin.java | 4 +- .../index/search/MultiMatchQueryTests.java | 10 +- .../shard/IndexSearcherWrapperTests.java | 97 +- .../elasticsearch/index/store/StoreTests.java | 2 +- 
.../indices/IndicesQueryCacheTests.java | 4 +- .../RandomExceptionCircuitBreakerIT.java | 6 +- .../search/MultiValueModeTests.java | 423 ++++---- .../aggregations/AggregatorTestCase.java | 2 +- .../range/BinaryRangeAggregatorTests.java | 19 +- .../support/IncludeExcludeTests.java | 16 +- .../support/MissingValuesTests.java | 166 ++-- .../support/ScriptValuesTests.java | 49 +- .../support/ValuesSourceConfigTests.java | 63 +- .../basic/SearchWithRandomExceptionsIT.java | 5 + .../highlight/PlainHighlighterTests.java | 53 - .../search/functionscore/QueryRescorerIT.java | 4 +- .../search/query/SimpleQueryStringIT.java | 1 + .../metrics/geocentroid-aggregation.asciidoc | 14 +- .../index-modules/similarity.asciidoc | 12 +- .../migration/migrate_6_0/search.asciidoc | 3 + .../migration/migrate_6_0/settings.asciidoc | 8 +- docs/reference/query-dsl/bool-query.asciidoc | 5 - .../query-dsl/common-terms-query.asciidoc | 4 +- .../matrix/stats/MatrixStatsAggregator.java | 14 +- .../lucene-expressions-6.5.0.jar.sha1 | 1 - ...xpressions-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../expression/CountMethodValueSource.java | 13 +- .../expression/DateMethodValueSource.java | 16 +- .../expression/DateObjectValueSource.java | 16 +- .../expression/EmptyMemberValueSource.java | 17 +- .../expression/FieldDataValueSource.java | 8 +- .../expression/GeoEmptyValueSource.java | 5 +- .../expression/GeoLatitudeValueSource.java | 9 +- .../expression/GeoLongitudeValueSource.java | 9 +- .../percolator/PercolateQuery.java | 16 +- .../percolator/PercolateQueryBuilder.java | 6 +- .../percolator/CandidateQueryTests.java | 12 +- .../percolator/QueryAnalyzerTests.java | 2 +- .../lucene-analyzers-icu-6.5.0.jar.sha1 | 1 - ...lyzers-icu-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene-analyzers-kuromoji-6.5.0.jar.sha1 | 1 - ...s-kuromoji-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene-analyzers-phonetic-6.5.0.jar.sha1 | 1 - ...s-phonetic-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + 
.../lucene-analyzers-smartcn-6.5.0.jar.sha1 | 1 - ...rs-smartcn-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../lucene-analyzers-stempel-6.5.0.jar.sha1 | 1 - ...rs-stempel-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + ...lucene-analyzers-morfologik-6.5.0.jar.sha1 | 1 - ...morfologik-7.0.0-snapshot-89f6d17.jar.sha1 | 1 + .../test/search_shards/10_basic.yaml | 1 - .../AnalysisFactoryTestCase.java | 2 + .../org/elasticsearch/test/ESTestCase.java | 4 - .../test/FieldMaskingReader.java | 25 +- .../test/engine/MockEngineSupport.java | 5 - .../engine/ThrowingLeafReaderWrapper.java | 11 + 269 files changed, 3993 insertions(+), 3868 deletions(-) delete mode 100644 core/licenses/lucene-analyzers-common-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-analyzers-common-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-backward-codecs-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-backward-codecs-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-core-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-core-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-grouping-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-grouping-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-highlighter-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-highlighter-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-join-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-join-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-memory-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-memory-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-misc-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-misc-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-queries-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-queries-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 
core/licenses/lucene-queryparser-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-queryparser-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-sandbox-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-sandbox-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-spatial-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-spatial-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-spatial-extras-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-spatial-extras-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-spatial3d-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-spatial3d-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 core/licenses/lucene-suggest-6.5.0.jar.sha1 create mode 100644 core/licenses/lucene-suggest-7.0.0-snapshot-89f6d17.jar.sha1 create mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/AbstractBinaryDocValues.java create mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/AbstractNumericDocValues.java rename core/src/main/java/org/elasticsearch/index/fielddata/{AbstractRandomAccessOrds.java => AbstractSortedDocValues.java} (55%) create mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedNumericDocValues.java create mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedSetDocValues.java create mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortingNumericDocValues.java delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java delete mode 100644 core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDVAtomicFieldData.java delete mode 100644 core/src/main/java/org/elasticsearch/index/shard/FilterDocValuesProducer.java delete mode 100644 
modules/lang-expression/licenses/lucene-expressions-6.5.0.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.0.0-snapshot-89f6d17.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.0.0-snapshot-89f6d17.jar.sha1 diff --git a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt index 64ae6784f15..f1d271d602c 100644 --- a/buildSrc/src/main/resources/forbidden/es-all-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-all-signatures.txt @@ -26,12 +26,6 @@ java.util.concurrent.ThreadLocalRandom java.security.MessageDigest#clone() @ use org.elasticsearch.common.hash.MessageDigests -@defaultMessage this should not have been added to lucene in the first place -org.apache.lucene.index.IndexReader#getCombinedCoreAndDeletesKey() - 
-@defaultMessage Soon to be removed -org.apache.lucene.document.FieldType#numericType() - @defaultMessage Don't use MethodHandles in slow ways, don't be lenient in tests. java.lang.invoke.MethodHandle#invoke(java.lang.Object[]) java.lang.invoke.MethodHandle#invokeWithArguments(java.lang.Object[]) diff --git a/buildSrc/src/main/resources/forbidden/es-core-signatures.txt b/buildSrc/src/main/resources/forbidden/es-core-signatures.txt index 059be403a67..6507f05be5c 100644 --- a/buildSrc/src/main/resources/forbidden/es-core-signatures.txt +++ b/buildSrc/src/main/resources/forbidden/es-core-signatures.txt @@ -36,16 +36,6 @@ org.apache.lucene.index.IndexReader#decRef() org.apache.lucene.index.IndexReader#incRef() org.apache.lucene.index.IndexReader#tryIncRef() -@defaultMessage Close listeners can only installed via ElasticsearchDirectoryReader#addReaderCloseListener -org.apache.lucene.index.IndexReader#addReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener) -org.apache.lucene.index.IndexReader#removeReaderClosedListener(org.apache.lucene.index.IndexReader$ReaderClosedListener) - -@defaultMessage Pass the precision step from the mappings explicitly instead -org.apache.lucene.search.LegacyNumericRangeQuery#newDoubleRange(java.lang.String,java.lang.Double,java.lang.Double,boolean,boolean) -org.apache.lucene.search.LegacyNumericRangeQuery#newFloatRange(java.lang.String,java.lang.Float,java.lang.Float,boolean,boolean) -org.apache.lucene.search.LegacyNumericRangeQuery#newIntRange(java.lang.String,java.lang.Integer,java.lang.Integer,boolean,boolean) -org.apache.lucene.search.LegacyNumericRangeQuery#newLongRange(java.lang.String,java.lang.Long,java.lang.Long,boolean,boolean) - @defaultMessage Only use wait / notify when really needed try to use concurrency primitives, latches or callbacks instead. 
java.lang.Object#wait() java.lang.Object#wait(long) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index a3a1681eb3e..ca263b0efa9 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,6 +1,6 @@ # When updating elasticsearch, please update 'rest' version in core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy elasticsearch = 6.0.0-alpha1 -lucene = 6.5.0 +lucene = 7.0.0-snapshot-89f6d17 # optional dependencies spatial4j = 0.6 diff --git a/core/licenses/lucene-analyzers-common-6.5.0.jar.sha1 b/core/licenses/lucene-analyzers-common-6.5.0.jar.sha1 deleted file mode 100644 index 77d21a23774..00000000000 --- a/core/licenses/lucene-analyzers-common-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3989779b05ecd0ace6affe19223b1c27156604f1 \ No newline at end of file diff --git a/core/licenses/lucene-analyzers-common-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-analyzers-common-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..93afa8a73f4 --- /dev/null +++ b/core/licenses/lucene-analyzers-common-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +e69234c2e898d86a53edbe8d22e33bebc45286cd \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-6.5.0.jar.sha1 b/core/licenses/lucene-backward-codecs-6.5.0.jar.sha1 deleted file mode 100644 index 9eaff80ac08..00000000000 --- a/core/licenses/lucene-backward-codecs-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6a8660e7133f357ef40d9cac26316ccd9937a2eb \ No newline at end of file diff --git a/core/licenses/lucene-backward-codecs-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-backward-codecs-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..707ed41267f --- /dev/null +++ b/core/licenses/lucene-backward-codecs-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +48172a8e1fe6562f55ab671d42af53652794d5df \ No newline at end of file diff --git a/core/licenses/lucene-core-6.5.0.jar.sha1 
b/core/licenses/lucene-core-6.5.0.jar.sha1 deleted file mode 100644 index 7af91ec15bf..00000000000 --- a/core/licenses/lucene-core-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ff176c9bde4228b43827849f5d2ff2e2717e3297 \ No newline at end of file diff --git a/core/licenses/lucene-core-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-core-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..ef6a939a668 --- /dev/null +++ b/core/licenses/lucene-core-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +3dab251d4c7ab4ff5095e5f1d1e127ec2cf3c07d \ No newline at end of file diff --git a/core/licenses/lucene-grouping-6.5.0.jar.sha1 b/core/licenses/lucene-grouping-6.5.0.jar.sha1 deleted file mode 100644 index 08ccc2cd086..00000000000 --- a/core/licenses/lucene-grouping-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -10d2e5b36f460527ac9b948be0ec3077bde5b0ca \ No newline at end of file diff --git a/core/licenses/lucene-grouping-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-grouping-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..dfa06b60b4e --- /dev/null +++ b/core/licenses/lucene-grouping-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +c01ae8a23b733d75d058a76bd85fcb49b9fd06fd \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-6.5.0.jar.sha1 b/core/licenses/lucene-highlighter-6.5.0.jar.sha1 deleted file mode 100644 index a8069723f16..00000000000 --- a/core/licenses/lucene-highlighter-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0019bb6a631ea0123e8e553b0510fa81c9d3c3eb \ No newline at end of file diff --git a/core/licenses/lucene-highlighter-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-highlighter-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..51c8a47474f --- /dev/null +++ b/core/licenses/lucene-highlighter-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +c53df048b97946fe66035505306b5651b702adb1 \ No newline at end of file diff --git a/core/licenses/lucene-join-6.5.0.jar.sha1 
b/core/licenses/lucene-join-6.5.0.jar.sha1 deleted file mode 100644 index cbad6199d76..00000000000 --- a/core/licenses/lucene-join-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dad85baba266793b9ceb80a9b08c4ee9838e09df \ No newline at end of file diff --git a/core/licenses/lucene-join-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-join-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..ad7e123f6df --- /dev/null +++ b/core/licenses/lucene-join-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +1ecb349ba29abab75359e5125ac8a94fc81441d5 \ No newline at end of file diff --git a/core/licenses/lucene-memory-6.5.0.jar.sha1 b/core/licenses/lucene-memory-6.5.0.jar.sha1 deleted file mode 100644 index 5f22c0d9cfe..00000000000 --- a/core/licenses/lucene-memory-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -938f9f7efe8a403fd57c99aedd75d040d9caa896 \ No newline at end of file diff --git a/core/licenses/lucene-memory-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-memory-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..6c9fc8be424 --- /dev/null +++ b/core/licenses/lucene-memory-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +e5f53b38652b1284ff254fba39e624ec117aef7d \ No newline at end of file diff --git a/core/licenses/lucene-misc-6.5.0.jar.sha1 b/core/licenses/lucene-misc-6.5.0.jar.sha1 deleted file mode 100644 index 2b405d7f16a..00000000000 --- a/core/licenses/lucene-misc-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -afdff39ecb30f6e2c6f056a5bdfcb13d928a25af \ No newline at end of file diff --git a/core/licenses/lucene-misc-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-misc-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..d69e68cdde6 --- /dev/null +++ b/core/licenses/lucene-misc-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +2f340ed3f46d6b4c89fa31975b675c19028c15eb \ No newline at end of file diff --git a/core/licenses/lucene-queries-6.5.0.jar.sha1 b/core/licenses/lucene-queries-6.5.0.jar.sha1 deleted 
file mode 100644 index 9a046ce204f..00000000000 --- a/core/licenses/lucene-queries-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e3971a008070712d57b59cf1f7b44c0d9d3df25 \ No newline at end of file diff --git a/core/licenses/lucene-queries-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-queries-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..1d2f98e6c8c --- /dev/null +++ b/core/licenses/lucene-queries-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +a13862fb62cc1e516d16d6b6bb3cdb906c4925f6 \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-6.5.0.jar.sha1 b/core/licenses/lucene-queryparser-6.5.0.jar.sha1 deleted file mode 100644 index 3136885ab92..00000000000 --- a/core/licenses/lucene-queryparser-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -225b904edf91ccdffffa398e1924ebadd5677c09 \ No newline at end of file diff --git a/core/licenses/lucene-queryparser-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-queryparser-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..29492f1ed6b --- /dev/null +++ b/core/licenses/lucene-queryparser-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +4e014f72a588453bae7dd1a555d741cf3bf39032 \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-6.5.0.jar.sha1 b/core/licenses/lucene-sandbox-6.5.0.jar.sha1 deleted file mode 100644 index e3787e336df..00000000000 --- a/core/licenses/lucene-sandbox-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5c994fc5dc4f37133a861571211303d81c5d51ff \ No newline at end of file diff --git a/core/licenses/lucene-sandbox-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-sandbox-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..80794b3e272 --- /dev/null +++ b/core/licenses/lucene-sandbox-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +5e87d61c604d6b1c0ee5c38f09441d1b8b9c8c2b \ No newline at end of file diff --git a/core/licenses/lucene-spatial-6.5.0.jar.sha1 b/core/licenses/lucene-spatial-6.5.0.jar.sha1 
deleted file mode 100644 index cbadbfc42d7..00000000000 --- a/core/licenses/lucene-spatial-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -553b7b13bef994f14076a85557df03cad67322e9 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-spatial-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..859a89f5a02 --- /dev/null +++ b/core/licenses/lucene-spatial-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +be14aa163b339403d8ec904493c1be5dfa9baeaf \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-6.5.0.jar.sha1 b/core/licenses/lucene-spatial-extras-6.5.0.jar.sha1 deleted file mode 100644 index f2ad71855f2..00000000000 --- a/core/licenses/lucene-spatial-extras-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -73deae791d861820974600705ba06e9f801cbe56 \ No newline at end of file diff --git a/core/licenses/lucene-spatial-extras-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-spatial-extras-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..ee3c7292882 --- /dev/null +++ b/core/licenses/lucene-spatial-extras-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +a2c13be0fe4c5a98a30ec6ae673be1442409817c \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-6.5.0.jar.sha1 b/core/licenses/lucene-spatial3d-6.5.0.jar.sha1 deleted file mode 100644 index 8fca9ac1ebc..00000000000 --- a/core/licenses/lucene-spatial3d-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c2aad69500dac79338ef45f570cab47bec3d2724 \ No newline at end of file diff --git a/core/licenses/lucene-spatial3d-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-spatial3d-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..28fab4a3819 --- /dev/null +++ b/core/licenses/lucene-spatial3d-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +92b8282e474845fdae31f9f239f953bc7164401f \ No newline at end of file diff --git a/core/licenses/lucene-suggest-6.5.0.jar.sha1 
b/core/licenses/lucene-suggest-6.5.0.jar.sha1 deleted file mode 100644 index 62764fbbc32..00000000000 --- a/core/licenses/lucene-suggest-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -acf211f2bf901dfc8155a46c5a42c5650edf74ef \ No newline at end of file diff --git a/core/licenses/lucene-suggest-7.0.0-snapshot-89f6d17.jar.sha1 b/core/licenses/lucene-suggest-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..e928966f274 --- /dev/null +++ b/core/licenses/lucene-suggest-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +1c4aaea267ed41657ebf01769bfddbcab5b27414 \ No newline at end of file diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 0b34a95710c..dbad7e0bf72 100644 --- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -296,16 +296,15 @@ public abstract class BlendedTermQuery extends Query { return Objects.hash(classHash(), Arrays.hashCode(equalsTerms())); } - public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final boolean disableCoord) { - return booleanBlendedQuery(terms, null, disableCoord); + public static BlendedTermQuery booleanBlendedQuery(Term[] terms) { + return booleanBlendedQuery(terms, null); } - public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final float[] boosts, final boolean disableCoord) { + public static BlendedTermQuery booleanBlendedQuery(Term[] terms, final float[] boosts) { return new BlendedTermQuery(terms, boosts) { @Override protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) { BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); - booleanQueryBuilder.setDisableCoord(disableCoord); for (int i = 0; i < terms.length; i++) { Query query = new TermQuery(terms[i], ctx[i]); if (boosts != null && boosts[i] != 1f) { @@ -318,14 +317,12 @@ public abstract 
class BlendedTermQuery extends Query { }; } - public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final boolean disableCoord, final float maxTermFrequency) { + public static BlendedTermQuery commonTermsBlendedQuery(Term[] terms, final float[] boosts, final float maxTermFrequency) { return new BlendedTermQuery(terms, boosts) { @Override protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) { BooleanQuery.Builder highBuilder = new BooleanQuery.Builder(); - highBuilder.setDisableCoord(disableCoord); BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder(); - lowBuilder.setDisableCoord(disableCoord); for (int i = 0; i < terms.length; i++) { Query query = new TermQuery(terms[i], ctx[i]); if (boosts != null && boosts[i] != 1f) { @@ -343,7 +340,6 @@ public abstract class BlendedTermQuery extends Query { BooleanQuery low = lowBuilder.build(); if (low.clauses().isEmpty()) { BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder(); - queryBuilder.setDisableCoord(disableCoord); for (BooleanClause booleanClause : high) { queryBuilder.add(booleanClause.getQuery(), Occur.MUST); } @@ -352,7 +348,6 @@ public abstract class BlendedTermQuery extends Query { return low; } else { return new BooleanQuery.Builder() - .setDisableCoord(true) .add(high, BooleanClause.Occur.SHOULD) .add(low, BooleanClause.Occur.MUST) .build(); diff --git a/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java b/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java index 1889c6e759b..4580de4cc4a 100644 --- a/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/ExtendedCommonTermsQuery.java @@ -35,8 +35,8 @@ public class ExtendedCommonTermsQuery extends CommonTermsQuery { private final MappedFieldType fieldType; - public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, 
boolean disableCoord, MappedFieldType fieldType) { - super(highFreqOccur, lowFreqOccur, maxTermFrequency, disableCoord); + public ExtendedCommonTermsQuery(Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) { + super(highFreqOccur, lowFreqOccur, maxTermFrequency); this.fieldType = fieldType; } diff --git a/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java b/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java index a8b7dc9299f..d4f9ab72973 100644 --- a/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java +++ b/core/src/main/java/org/apache/lucene/queries/MinDocQuery.java @@ -57,8 +57,8 @@ public final class MinDocQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new ConstantScoreWeight(this) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + return new ConstantScoreWeight(this, boost) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { final int maxDoc = context.reader().maxDoc(); diff --git a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java index 2b592aa935f..79f522e8c1f 100644 --- a/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java +++ b/core/src/main/java/org/apache/lucene/queryparser/classic/MapperQueryParser.java @@ -25,9 +25,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.index.Term; -import org.apache.lucene.queryparser.analyzing.AnalyzingQueryParser; import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BoostQuery; import 
org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.FuzzyQuery; @@ -70,7 +68,7 @@ import static org.elasticsearch.common.lucene.search.Queries.fixNegativeQueryIfN * Also breaks fields with [type].[name] into a boolean query that must include the type * as well as the query on the name. */ -public class MapperQueryParser extends AnalyzingQueryParser { +public class MapperQueryParser extends QueryParser { public static final Map FIELD_QUERY_EXTENSIONS; @@ -103,14 +101,13 @@ public class MapperQueryParser extends AnalyzingQueryParser { setAnalyzer(settings.analyzer()); setMultiTermRewriteMethod(settings.rewriteMethod()); setEnablePositionIncrements(settings.enablePositionIncrements()); + setSplitOnWhitespace(settings.splitOnWhitespace()); setAutoGeneratePhraseQueries(settings.autoGeneratePhraseQueries()); setMaxDeterminizedStates(settings.maxDeterminizedStates()); setAllowLeadingWildcard(settings.allowLeadingWildcard()); - setLowercaseExpandedTerms(false); setPhraseSlop(settings.phraseSlop()); setDefaultOperator(settings.defaultOperator()); setFuzzyPrefixLength(settings.fuzzyPrefixLength()); - setSplitOnWhitespace(settings.splitOnWhitespace()); } /** @@ -175,7 +172,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { } } if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQueryCoordDisabled(clauses); + return getBooleanQuery(clauses); } } else { return getFieldQuerySingle(field, queryText, quoted); @@ -277,7 +274,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { } } if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQueryCoordDisabled(clauses); + return getBooleanQuery(clauses); } } else { return super.getFieldQuery(field, queryText, slop); @@ -328,7 +325,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { } } if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQueryCoordDisabled(clauses); + return 
getBooleanQuery(clauses); } } @@ -386,7 +383,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { clauses.add(new BooleanClause(applyBoost(mField, q), BooleanClause.Occur.SHOULD)); } } - return getBooleanQueryCoordDisabled(clauses); + return getBooleanQuery(clauses); } } else { return getFuzzyQuerySingle(field, termStr, minSimilarity); @@ -450,7 +447,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { } } if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQueryCoordDisabled(clauses); + return getBooleanQuery(clauses); } } else { return getPrefixQuerySingle(field, termStr); @@ -559,7 +556,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { innerClauses.add(new BooleanClause(super.getPrefixQuery(field, token), BooleanClause.Occur.SHOULD)); } - posQuery = getBooleanQueryCoordDisabled(innerClauses); + posQuery = getBooleanQuery(innerClauses); } clauses.add(new BooleanClause(posQuery, getDefaultOperator() == Operator.AND ? 
BooleanClause.Occur.MUST : BooleanClause.Occur.SHOULD)); @@ -612,7 +609,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { } } if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQueryCoordDisabled(clauses); + return getBooleanQuery(clauses); } } else { return getWildcardQuerySingle(field, termStr); @@ -676,7 +673,7 @@ public class MapperQueryParser extends AnalyzingQueryParser { } } if (clauses.isEmpty()) return null; // happens for stopwords - return getBooleanQueryCoordDisabled(clauses); + return getBooleanQuery(clauses); } } else { return getRegexpQuerySingle(field, termStr); @@ -713,19 +710,6 @@ public class MapperQueryParser extends AnalyzingQueryParser { } } - /** - * @deprecated review all use of this, don't rely on coord - */ - @Deprecated - protected Query getBooleanQueryCoordDisabled(List clauses) throws ParseException { - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.setDisableCoord(true); - for (BooleanClause clause : clauses) { - builder.add(clause); - } - return fixNegativeQueryIfNeeded(builder.build()); - } - @Override protected Query getBooleanQuery(List clauses) throws ParseException { diff --git a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java index 5bc8afb347c..e4877338902 100644 --- a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java +++ b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingDocValuesSource.java @@ -22,31 +22,32 @@ import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; import 
org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.fielddata.AbstractNumericDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedDocValues; import java.io.IOException; +import java.util.Collection; /** * Utility class that ensures that a single collapse key is extracted per document. */ -abstract class CollapsingDocValuesSource { +abstract class CollapsingDocValuesSource extends GroupSelector { protected final String field; CollapsingDocValuesSource(String field) throws IOException { this.field = field; } - abstract T get(int doc); - - abstract T copy(T value, T reuse); - - abstract void setNextReader(LeafReader reader) throws IOException; + @Override + public void setGroups(Collection> groups) { + throw new UnsupportedOperationException(); + } /** * Implementation for {@link NumericDocValues} and {@link SortedNumericDocValues}. @@ -54,35 +55,43 @@ abstract class CollapsingDocValuesSource { */ static class Numeric extends CollapsingDocValuesSource { private NumericDocValues values; - private Bits docsWithField; + private long value; + private boolean hasValue; Numeric(String field) throws IOException { super(field); } @Override - public Long get(int doc) { - if (docsWithField.get(doc)) { - return values.get(doc); + public State advanceTo(int doc) throws IOException { + if (values.advanceExact(doc)) { + hasValue = true; + value = values.longValue(); + return State.ACCEPT; } else { - return null; + hasValue = false; + return State.SKIP; } } @Override - public Long copy(Long value, Long reuse) { - return value; + public Long currentValue() { + return hasValue ? 
value : null; } @Override - public void setNextReader(LeafReader reader) throws IOException { + public Long copyValue() { + return currentValue(); + } + + @Override + public void setNextReader(LeafReaderContext readerContext) throws IOException { + LeafReader reader = readerContext.reader(); DocValuesType type = getDocValuesType(reader, field); if (type == null || type == DocValuesType.NONE) { values = DocValues.emptyNumeric(); - docsWithField = new Bits.MatchNoBits(reader.maxDoc()); return ; } - docsWithField = DocValues.getDocsWithField(reader, field); switch (type) { case NUMERIC: values = DocValues.getNumeric(reader, field); @@ -92,17 +101,34 @@ abstract class CollapsingDocValuesSource { final SortedNumericDocValues sorted = DocValues.getSortedNumeric(reader, field); values = DocValues.unwrapSingleton(sorted); if (values == null) { - values = new NumericDocValues() { + values = new AbstractNumericDocValues() { + + private long value; + @Override - public long get(int docID) { - sorted.setDocument(docID); - assert sorted.count() > 0; - if (sorted.count() > 1) { - throw new IllegalStateException("failed to collapse " + docID + - ", the collapse field must be single valued"); + public boolean advanceExact(int target) throws IOException { + if (sorted.advanceExact(target)) { + if (sorted.docValueCount() > 1) { + throw new IllegalStateException("failed to collapse " + target + + ", the collapse field must be single valued"); + } + value = sorted.nextValue(); + return true; + } else { + return false; } - return sorted.valueAt(0); } + + @Override + public int docID() { + return sorted.docID(); + } + + @Override + public long longValue() throws IOException { + return value; + } + }; } break; @@ -119,47 +145,56 @@ abstract class CollapsingDocValuesSource { * Fails with an {@link IllegalStateException} if a document contains multiple values for the specified field. 
*/ static class Keyword extends CollapsingDocValuesSource { - private Bits docsWithField; private SortedDocValues values; + private int ord; Keyword(String field) throws IOException { super(field); } @Override - public BytesRef get(int doc) { - if (docsWithField.get(doc)) { - return values.get(doc); + public org.apache.lucene.search.grouping.GroupSelector.State advanceTo(int doc) + throws IOException { + if (values.advanceExact(doc)) { + ord = values.ordValue(); + return State.ACCEPT; } else { - return null; + ord = -1; + return State.SKIP; } } @Override - public BytesRef copy(BytesRef value, BytesRef reuse) { + public BytesRef currentValue() { + if (ord == -1) { + return null; + } else { + try { + return values.lookupOrd(ord); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + } + + @Override + public BytesRef copyValue() { + BytesRef value = currentValue(); if (value == null) { return null; - } - if (reuse != null) { - reuse.bytes = ArrayUtil.grow(reuse.bytes, value.length); - reuse.offset = 0; - reuse.length = value.length; - System.arraycopy(value.bytes, value.offset, reuse.bytes, 0, value.length); - return reuse; } else { return BytesRef.deepCopyOf(value); } } @Override - public void setNextReader(LeafReader reader) throws IOException { + public void setNextReader(LeafReaderContext readerContext) throws IOException { + LeafReader reader = readerContext.reader(); DocValuesType type = getDocValuesType(reader, field); if (type == null || type == DocValuesType.NONE) { values = DocValues.emptySorted(); - docsWithField = new Bits.MatchNoBits(reader.maxDoc()); return ; } - docsWithField = DocValues.getDocsWithField(reader, field); switch (type) { case SORTED: values = DocValues.getSorted(reader, field); @@ -169,20 +204,36 @@ abstract class CollapsingDocValuesSource { final SortedSetDocValues sorted = DocValues.getSortedSet(reader, field); values = DocValues.unwrapSingleton(sorted); if (values == null) { - values = new SortedDocValues() { + values 
= new AbstractSortedDocValues() { + + private int ord; + @Override - public int getOrd(int docID) { - sorted.setDocument(docID); - int ord = (int) sorted.nextOrd(); - if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { - throw new IllegalStateException("failed to collapse " + docID + - ", the collapse field must be single valued"); + public boolean advanceExact(int target) throws IOException { + if (sorted.advanceExact(target)) { + ord = (int) sorted.nextOrd(); + if (sorted.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + throw new IllegalStateException("failed to collapse " + target + + ", the collapse field must be single valued"); + } + return true; + } else { + return false; } + } + + @Override + public int docID() { + return sorted.docID(); + } + + @Override + public int ordValue() { return ord; } @Override - public BytesRef lookupOrd(int ord) { + public BytesRef lookupOrd(int ord) throws IOException { return sorted.lookupOrd(ord); } diff --git a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java index 955a63e5483..b5cb02bcd65 100644 --- a/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java +++ b/core/src/main/java/org/apache/lucene/search/grouping/CollapsingTopDocsCollector.java @@ -18,13 +18,11 @@ */ package org.apache.lucene.search.grouping; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.util.BytesRef; import java.io.IOException; import java.util.Collection; @@ -37,7 +35,7 @@ import static org.apache.lucene.search.SortField.Type.SCORE; * output. The collapsing is done in a single pass by selecting only the top sorted document per collapse key. 
* The value used for the collapse key of each group can be found in {@link CollapseTopFieldDocs#collapseValues}. */ -public abstract class CollapsingTopDocsCollector extends FirstPassGroupingCollector { +public final class CollapsingTopDocsCollector extends FirstPassGroupingCollector { protected final String collapseField; protected final Sort sort; @@ -47,9 +45,9 @@ public abstract class CollapsingTopDocsCollector extends FirstPassGroupingCol private float maxScore; private final boolean trackMaxScore; - private CollapsingTopDocsCollector(String collapseField, Sort sort, + CollapsingTopDocsCollector(GroupSelector groupSelector, String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException { - super(sort, topN); + super(groupSelector, sort, topN); this.collapseField = collapseField; this.trackMaxScore = trackMaxScore; if (trackMaxScore) { @@ -65,7 +63,7 @@ public abstract class CollapsingTopDocsCollector extends FirstPassGroupingCol * {@link CollapseTopFieldDocs}. The collapsing needs only one pass so we can create the final top docs at the end * of the first pass. 
*/ - public CollapseTopFieldDocs getTopDocs() { + public CollapseTopFieldDocs getTopDocs() throws IOException { Collection> groups = super.getTopGroups(0, true); if (groups == null) { return new CollapseTopFieldDocs(collapseField, totalHitCount, new ScoreDoc[0], @@ -121,57 +119,6 @@ public abstract class CollapsingTopDocsCollector extends FirstPassGroupingCol totalHitCount++; } - private static class Numeric extends CollapsingTopDocsCollector { - private final CollapsingDocValuesSource.Numeric source; - - private Numeric(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException { - super(collapseField, sort, topN, trackMaxScore); - source = new CollapsingDocValuesSource.Numeric(collapseField); - } - - @Override - protected void doSetNextReader(LeafReaderContext readerContext) throws IOException { - super.doSetNextReader(readerContext); - source.setNextReader(readerContext.reader()); - } - - @Override - protected Long getDocGroupValue(int doc) { - return source.get(doc); - } - - @Override - protected Long copyDocGroupValue(Long groupValue, Long reuse) { - return source.copy(groupValue, reuse); - } - } - - private static class Keyword extends CollapsingTopDocsCollector { - private final CollapsingDocValuesSource.Keyword source; - - private Keyword(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException { - super(collapseField, sort, topN, trackMaxScore); - source = new CollapsingDocValuesSource.Keyword(collapseField); - - } - - @Override - protected void doSetNextReader(LeafReaderContext readerContext) throws IOException { - super.doSetNextReader(readerContext); - source.setNextReader(readerContext.reader()); - } - - @Override - protected BytesRef getDocGroupValue(int doc) { - return source.get(doc); - } - - @Override - protected BytesRef copyDocGroupValue(BytesRef groupValue, BytesRef reuse) { - return source.copy(groupValue, reuse); - } - } - /** * Create a collapsing top docs collector on a {@link 
org.apache.lucene.index.NumericDocValues} field. * It accepts also {@link org.apache.lucene.index.SortedNumericDocValues} field but @@ -189,7 +136,8 @@ public abstract class CollapsingTopDocsCollector extends FirstPassGroupingCol */ public static CollapsingTopDocsCollector createNumeric(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException { - return new Numeric(collapseField, sort, topN, trackMaxScore); + return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Numeric(collapseField), + collapseField, sort, topN, trackMaxScore); } /** @@ -208,7 +156,8 @@ public abstract class CollapsingTopDocsCollector extends FirstPassGroupingCol */ public static CollapsingTopDocsCollector createKeyword(String collapseField, Sort sort, int topN, boolean trackMaxScore) throws IOException { - return new Keyword(collapseField, sort, topN, trackMaxScore); + return new CollapsingTopDocsCollector<>(new CollapsingDocValuesSource.Keyword(collapseField), + collapseField, sort, topN, trackMaxScore); } } diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index fad0d403d28..b3f38e57b52 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -82,7 +82,7 @@ public class Version implements Comparable { public static final Version V_5_5_0_UNRELEASED = new Version(V_5_5_0_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0); public static final int V_6_0_0_alpha1_ID_UNRELEASED = 6000001; public static final Version V_6_0_0_alpha1_UNRELEASED = - new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_6_5_0); + new Version(V_6_0_0_alpha1_ID_UNRELEASED, org.apache.lucene.util.Version.LUCENE_7_0_0); public static final Version CURRENT = V_6_0_0_alpha1_UNRELEASED; // unreleased versions must be added to the above list with the suffix _UNRELEASED (with the exception of CURRENT) diff --git 
a/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java index 0f1ddcd376c..cb31940a49c 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoHashUtils.java @@ -18,13 +18,13 @@ */ package org.elasticsearch.common.geo; +import org.apache.lucene.geo.Rectangle; +import org.apache.lucene.spatial.util.MortonEncoder; +import org.apache.lucene.util.BitUtil; + import java.util.ArrayList; import java.util.Collection; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.geo.Rectangle; -import org.apache.lucene.util.BitUtil; - /** * Utilities for converting to/from the GeoHash standard * @@ -42,19 +42,35 @@ public class GeoHashUtils { /** maximum precision for geohash strings */ public static final int PRECISION = 12; - private static final short MORTON_OFFSET = (GeoPointField.BITS<<1) - (PRECISION*5); + /** number of bits used for quantizing latitude and longitude values */ + public static final short BITS = 31; + /** scaling factors to convert lat/lon into unsigned space */ + private static final double LAT_SCALE = (0x1L<>> 2; + } + /** * Encode lon/lat to the geohash based long format (lon/lat interleaved, 4 least significant bits = level) */ public static final long longEncode(final double lon, final double lat, final int level) { // shift to appropriate level final short msf = (short)(((12 - level) * 5) + MORTON_OFFSET); - return ((BitUtil.flipFlop(GeoPointField.encodeLatLon(lat, lon)) >>> msf) << 4) | level; + return ((BitUtil.flipFlop(encodeLatLon(lat, lon)) >>> msf) << 4) | level; } /** @@ -120,7 +136,7 @@ public class GeoHashUtils { */ public static final String stringEncode(final double lon, final double lat, final int level) { // convert to geohashlong - final long ghLong = fromMorton(GeoPointField.encodeLatLon(lat, lon), level); + final long ghLong = 
fromMorton(encodeLatLon(lat, lon), level); return stringEncode(ghLong); } @@ -141,7 +157,7 @@ public class GeoHashUtils { StringBuilder geoHash = new StringBuilder(); short precision = 0; - final short msf = (GeoPointField.BITS<<1)-5; + final short msf = (BITS<<1)-5; long mask = 31L<>>(msf-(precision*5)))]); @@ -303,13 +319,31 @@ public class GeoHashUtils { return neighbors; } + /** decode longitude value from morton encoded geo point */ + public static final double decodeLongitude(final long hash) { + return unscaleLon(BitUtil.deinterleave(hash)); + } + + /** decode latitude value from morton encoded geo point */ + public static final double decodeLatitude(final long hash) { + return unscaleLat(BitUtil.deinterleave(hash >>> 1)); + } + + private static double unscaleLon(final long val) { + return (val / LON_SCALE) - 180; + } + + private static double unscaleLat(final long val) { + return (val / LAT_SCALE) - 90; + } + /** returns the latitude value from the string based geohash */ public static final double decodeLatitude(final String geohash) { - return GeoPointField.decodeLatitude(mortonEncode(geohash)); + return decodeLatitude(mortonEncode(geohash)); } /** returns the latitude value from the string based geohash */ public static final double decodeLongitude(final String geohash) { - return GeoPointField.decodeLongitude(mortonEncode(geohash)); + return decodeLongitude(mortonEncode(geohash)); } } diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java index f76720b9ed6..125bc5aefcf 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoPoint.java @@ -23,7 +23,6 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.IndexableField; -import 
org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.BitUtil; import org.apache.lucene.util.BytesRef; @@ -87,8 +86,8 @@ public final class GeoPoint { } public GeoPoint resetFromIndexHash(long hash) { - lon = GeoPointField.decodeLongitude(hash); - lat = GeoPointField.decodeLatitude(hash); + lon = GeoHashUtils.decodeLongitude(hash); + lat = GeoHashUtils.decodeLatitude(hash); return this; } @@ -112,7 +111,7 @@ public final class GeoPoint { public GeoPoint resetFromGeoHash(String geohash) { final long hash = mortonEncode(geohash); - return this.reset(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash)); + return this.reset(GeoHashUtils.decodeLatitude(hash), GeoHashUtils.decodeLongitude(hash)); } public GeoPoint resetFromGeoHash(long geohashLong) { diff --git a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java index 81215906a06..a5864146318 100644 --- a/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java +++ b/core/src/main/java/org/elasticsearch/common/geo/GeoUtils.java @@ -22,7 +22,6 @@ package org.elasticsearch.common.geo; import org.apache.lucene.geo.Rectangle; import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.SloppyMath; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.unit.DistanceUnit; @@ -511,35 +510,40 @@ public class GeoUtils { final GeoPoint... 
fromPoints) { final GeoPointValues singleValues = FieldData.unwrapSingleton(geoPointValues); if (singleValues != null && fromPoints.length == 1) { - final Bits docsWithField = FieldData.unwrapSingletonBits(geoPointValues); return FieldData.singleton(new NumericDoubleValues() { @Override - public double get(int docID) { - if (docsWithField != null && !docsWithField.get(docID)) { - return 0d; - } - final GeoPoint to = singleValues.get(docID); + public boolean advanceExact(int doc) throws IOException { + return singleValues.advanceExact(doc); + } + + @Override + public double doubleValue() throws IOException { final GeoPoint from = fromPoints[0]; + final GeoPoint to = singleValues.geoPointValue(); return distance.calculate(from.lat(), from.lon(), to.lat(), to.lon(), unit); } - }, docsWithField); + }); } else { return new SortingNumericDoubleValues() { @Override - public void setDocument(int doc) { - geoPointValues.setDocument(doc); - resize(geoPointValues.count() * fromPoints.length); - int v = 0; - for (GeoPoint from : fromPoints) { - for (int i = 0; i < geoPointValues.count(); ++i) { - final GeoPoint point = geoPointValues.valueAt(i); - values[v] = distance.calculate(from.lat(), from.lon(), point.lat(), point.lon(), unit); - v++; + public boolean advanceExact(int target) throws IOException { + if (geoPointValues.advanceExact(target)) { + resize(geoPointValues.docValueCount() * fromPoints.length); + int v = 0; + for (int i = 0; i < geoPointValues.docValueCount(); ++i) { + final GeoPoint point = geoPointValues.nextValue(); + for (GeoPoint from : fromPoints) { + values[v] = distance.calculate(from.lat(), from.lon(), point.lat(), point.lon(), unit); + v++; + } } + sort(); + return true; + } else { + return false; } - sort(); } }; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java index bf448b61539..c213c384611 100644 --- 
a/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/Lucene.java @@ -51,14 +51,14 @@ import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SimpleCollector; import org.apache.lucene.search.SortField; +import org.apache.lucene.search.SortedNumericSortField; +import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.TimeLimitingCollector; import org.apache.lucene.search.TopDocs; import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; import org.apache.lucene.search.grouping.CollapseTopFieldDocs; -import org.apache.lucene.search.SortedNumericSortField; -import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; @@ -89,9 +89,9 @@ import java.util.Map; import java.util.Objects; public class Lucene { - public static final String LATEST_DOC_VALUES_FORMAT = "Lucene54"; + public static final String LATEST_DOC_VALUES_FORMAT = "Lucene70"; public static final String LATEST_POSTINGS_FORMAT = "Lucene50"; - public static final String LATEST_CODEC = "Lucene62"; + public static final String LATEST_CODEC = "Lucene70"; static { Deprecated annotation = PostingsFormat.forName(LATEST_POSTINGS_FORMAT).getClass().getAnnotation(Deprecated.class); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java index 38e7691f398..73255aef133 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/ShardCoreKeyMap.java @@ -19,8 +19,8 @@ package org.elasticsearch.common.lucene; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; 
-import org.apache.lucene.index.LeafReader.CoreClosedListener; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; @@ -46,8 +46,8 @@ import java.util.concurrent.ConcurrentHashMap; */ public final class ShardCoreKeyMap { - private final Map coreKeyToShard; - private final Map> indexToCoreKey; + private final Map coreKeyToShard; + private final Map> indexToCoreKey; public ShardCoreKeyMap() { coreKeyToShard = new ConcurrentHashMap<>(); @@ -63,7 +63,11 @@ public final class ShardCoreKeyMap { if (shardId == null) { throw new IllegalArgumentException("Could not extract shard id from " + reader); } - final Object coreKey = reader.getCoreCacheKey(); + final IndexReader.CacheHelper cacheHelper = reader.getCoreCacheHelper(); + if (cacheHelper == null) { + throw new IllegalArgumentException("Reader " + reader + " does not support caching"); + } + final IndexReader.CacheKey coreKey = cacheHelper.getKey(); if (coreKeyToShard.containsKey(coreKey)) { // Do this check before entering the synchronized block in order to @@ -75,18 +79,18 @@ public final class ShardCoreKeyMap { final String index = shardId.getIndexName(); synchronized (this) { if (coreKeyToShard.containsKey(coreKey) == false) { - Set objects = indexToCoreKey.get(index); + Set objects = indexToCoreKey.get(index); if (objects == null) { objects = new HashSet<>(); indexToCoreKey.put(index, objects); } final boolean added = objects.add(coreKey); assert added; - CoreClosedListener listener = ownerCoreCacheKey -> { + IndexReader.ClosedListener listener = ownerCoreCacheKey -> { assert coreKey == ownerCoreCacheKey; synchronized (ShardCoreKeyMap.this) { coreKeyToShard.remove(ownerCoreCacheKey); - final Set coreKeys = indexToCoreKey.get(index); + final Set coreKeys = indexToCoreKey.get(index); final boolean removed = coreKeys.remove(coreKey); assert removed; if (coreKeys.isEmpty()) { @@ -96,7 +100,7 @@ public final class ShardCoreKeyMap { }; boolean addedListener = false; try { - 
reader.addCoreClosedListener(listener); + cacheHelper.addClosedListener(listener); addedListener = true; // Only add the core key to the map as a last operation so that @@ -131,7 +135,7 @@ public final class ShardCoreKeyMap { * Get the set of core cache keys associated with the given index. */ public synchronized Set getCoreKeysForIndex(String index) { - final Set objects = indexToCoreKey.get(index); + final Set objects = indexToCoreKey.get(index); if (objects == null) { return Collections.emptySet(); } @@ -154,9 +158,9 @@ public final class ShardCoreKeyMap { if (assertionsEnabled == false) { throw new AssertionError("only run this if assertions are enabled"); } - Collection> values = indexToCoreKey.values(); + Collection> values = indexToCoreKey.values(); int size = 0; - for (Set value : values) { + for (Set value : values) { size += value.size(); } return size == coreKeyToShard.size(); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java index 5307a417e10..7df146a11c2 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/all/AllTermQuery.java @@ -105,27 +105,17 @@ public final class AllTermQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { if (needsScores == false) { - return new TermQuery(term).createWeight(searcher, needsScores); + return new TermQuery(term).createWeight(searcher, needsScores, boost); } final TermContext termStates = TermContext.build(searcher.getTopReaderContext(), term); final CollectionStatistics collectionStats = searcher.collectionStatistics(term.field()); final TermStatistics termStats = searcher.termStatistics(term, termStates); final Similarity similarity = 
searcher.getSimilarity(needsScores); - final SimWeight stats = similarity.computeWeight(collectionStats, termStats); + final SimWeight stats = similarity.computeWeight(boost, collectionStats, termStats); return new Weight(this) { - @Override - public float getValueForNormalization() throws IOException { - return stats.getValueForNormalization(); - } - - @Override - public void normalize(float norm, float topLevelBoost) { - stats.normalize(norm, topLevelBoost); - } - @Override public void extractTerms(Set terms) { terms.add(term); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java index b9caecea965..0f83e07874f 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchDirectoryReader.java @@ -49,6 +49,12 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader { return this.shardId; } + @Override + public CacheHelper getReaderCacheHelper() { + // safe to delegate since this reader does not alter the index + return in.getReaderCacheHelper(); + } + @Override protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { return new ElasticsearchDirectoryReader(in, wrapper, shardId); @@ -84,14 +90,17 @@ public final class ElasticsearchDirectoryReader extends FilterDirectoryReader { * @throws IllegalArgumentException if the reader doesn't contain an {@link ElasticsearchDirectoryReader} in it's hierarchy */ @SuppressForbidden(reason = "This is the only sane way to add a ReaderClosedListener") - public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ReaderClosedListener listener) { + public static void addReaderCloseListener(DirectoryReader reader, IndexReader.ClosedListener listener) { ElasticsearchDirectoryReader 
elasticsearchDirectoryReader = getElasticsearchDirectoryReader(reader); - if (elasticsearchDirectoryReader != null) { - assert reader.getCoreCacheKey() == elasticsearchDirectoryReader.getCoreCacheKey(); - elasticsearchDirectoryReader.addReaderClosedListener(listener); - return; + if (elasticsearchDirectoryReader == null) { + throw new IllegalArgumentException("Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader"); } - throw new IllegalArgumentException("Can't install close listener reader is not an ElasticsearchDirectoryReader/ElasticsearchLeafReader"); + IndexReader.CacheHelper cacheHelper = elasticsearchDirectoryReader.getReaderCacheHelper(); + if (cacheHelper == null) { + throw new IllegalArgumentException("Reader " + elasticsearchDirectoryReader + " does not support caching"); + } + assert cacheHelper.getKey() == reader.getReaderCacheHelper().getKey(); + cacheHelper.addClosedListener(listener); } /** diff --git a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java index 1094a7b1840..0f13ee43d30 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/index/ElasticsearchLeafReader.java @@ -49,8 +49,13 @@ public final class ElasticsearchLeafReader extends FilterLeafReader { } @Override - public Object getCoreCacheKey() { - return in.getCoreCacheKey(); + public CacheHelper getCoreCacheHelper() { + return in.getCoreCacheHelper(); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); } public static ElasticsearchLeafReader getElasticsearchLeafReader(LeafReader reader) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index 68a02ed256d..acf3f9ffdf8 
100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -121,7 +121,6 @@ public class Queries { if (isNegativeQuery(q)) { BooleanQuery bq = (BooleanQuery) q; BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.setDisableCoord(bq.isCoordDisabled()); for (BooleanClause clause : bq) { builder.add(clause); } @@ -154,7 +153,6 @@ public class Queries { int msm = calculateMinShouldMatch(optionalClauses, minimumShouldMatch); if (0 < msm) { BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.setDisableCoord(query.isCoordDisabled()); for (BooleanClause clause : query) { builder.add(clause); } @@ -170,10 +168,7 @@ public class Queries { * otherwise return the original query. */ public static Query maybeApplyMinimumShouldMatch(Query query, @Nullable String minimumShouldMatch) { - // If the coordination factor is disabled on a boolean query we don't apply the minimum should match. - // This is done to make sure that the minimum_should_match doesn't get applied when there is only one word - // and multiple variations of the same word in the query (synonyms for instance). 
- if (query instanceof BooleanQuery && !((BooleanQuery) query).isCoordDisabled()) { + if (query instanceof BooleanQuery) { return applyMinimumShouldMatch((BooleanQuery) query, minimumShouldMatch); } else if (query instanceof ExtendedCommonTermsQuery) { ((ExtendedCommonTermsQuery)query).setLowFreqMinimumNumberShouldMatch(minimumShouldMatch); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java index 3bc5542c2aa..e225df040ab 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FieldValueFactorFunction.java @@ -62,7 +62,7 @@ public class FieldValueFactorFunction extends ScoreFunction { public LeafScoreFunction getLeafScoreFunction(LeafReaderContext ctx) { final SortedNumericDoubleValues values; if(indexFieldData == null) { - values = FieldData.emptySortedNumericDoubles(ctx.reader().maxDoc()); + values = FieldData.emptySortedNumericDoubles(); } else { values = this.indexFieldData.load(ctx).getDoubleValues(); } @@ -70,16 +70,16 @@ public class FieldValueFactorFunction extends ScoreFunction { return new LeafScoreFunction() { @Override - public double score(int docId, float subQueryScore) { - values.setDocument(docId); - final int numValues = values.count(); + public double score(int docId, float subQueryScore) throws IOException { double value; - if (numValues > 0) { - value = values.valueAt(0); - } else if (missing != null) { - value = missing; + if (values.advanceExact(docId)) { + value = values.nextValue(); } else { - throw new ElasticsearchException("Missing value for field [" + field + "]"); + if (missing != null) { + value = missing; + } else { + throw new ElasticsearchException("Missing value for field [" + field + "]"); + } } double val = value * boostFactor; double result = 
modifier.apply(val); @@ -91,7 +91,7 @@ public class FieldValueFactorFunction extends ScoreFunction { } @Override - public Explanation explainScore(int docId, Explanation subQueryScore) { + public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { String modifierStr = modifier != null ? modifier.toString() : ""; String defaultStr = missing != null ? "?:" + missing : ""; double score = score(docId, subQueryScore.getValue()); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java index fe12622748e..abf145406c5 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FiltersFunctionScoreQuery.java @@ -135,9 +135,9 @@ public class FiltersFunctionScoreQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { if (needsScores == false && minScore == null) { - return subQuery.createWeight(searcher, needsScores); + return subQuery.createWeight(searcher, needsScores, boost); } boolean subQueryNeedsScores = combineFunction != CombineFunction.REPLACE; @@ -146,7 +146,7 @@ public class FiltersFunctionScoreQuery extends Query { subQueryNeedsScores |= filterFunctions[i].function.needsScores(); filterWeights[i] = searcher.createNormalizedWeight(filterFunctions[i].filter, false); } - Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores); + Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores, boost); return new CustomBoostFactorWeight(this, subQueryWeight, filterWeights, subQueryNeedsScores); } @@ -168,16 +168,6 @@ public class FiltersFunctionScoreQuery 
extends Query { subQueryWeight.extractTerms(terms); } - @Override - public float getValueForNormalization() throws IOException { - return subQueryWeight.getValueForNormalization(); - } - - @Override - public void normalize(float norm, float boost) { - subQueryWeight.normalize(norm, boost); - } - private FiltersFunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { @@ -281,7 +271,7 @@ public class FiltersFunctionScoreQuery extends Query { return scoreCombiner.combine(subQueryScore, factor, maxBoost); } - protected double computeScore(int docId, float subQueryScore) { + protected double computeScore(int docId, float subQueryScore) throws IOException { double factor = 1d; switch(scoreMode) { case FIRST: diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java index 61de1ab303f..95fc58dce59 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/FunctionScoreQuery.java @@ -91,16 +91,16 @@ public class FunctionScoreQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { if (needsScores == false && minScore == null) { - return subQuery.createWeight(searcher, needsScores); + return subQuery.createWeight(searcher, needsScores, boost); } boolean subQueryNeedsScores = combineFunction != CombineFunction.REPLACE // if we don't replace we need the original score || function == null // when the function is null, we just multiply the score, so we need it || function.needsScores(); // some scripts can replace with a script that 
returns eg. 1/_score - Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores); + Weight subQueryWeight = subQuery.createWeight(searcher, subQueryNeedsScores, boost); return new CustomBoostFactorWeight(this, subQueryWeight, subQueryNeedsScores); } @@ -120,16 +120,6 @@ public class FunctionScoreQuery extends Query { subQueryWeight.extractTerms(terms); } - @Override - public float getValueForNormalization() throws IOException { - return subQueryWeight.getValueForNormalization(); - } - - @Override - public void normalize(float norm, float boost) { - subQueryWeight.normalize(norm, boost); - } - private FunctionFactorScorer functionScorer(LeafReaderContext context) throws IOException { Scorer subQueryScorer = subQueryWeight.scorer(context); if (subQueryScorer == null) { diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/LeafScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/LeafScoreFunction.java index 7df35fa580f..c00431a34ba 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/LeafScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/LeafScoreFunction.java @@ -26,7 +26,7 @@ import java.io.IOException; /** Per-leaf {@link ScoreFunction}. 
*/ public abstract class LeafScoreFunction { - public abstract double score(int docId, float subQueryScore); + public abstract double score(int docId, float subQueryScore) throws IOException; public abstract Explanation explainScore(int docId, Explanation subQueryScore) throws IOException; diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java index 3810c16bc0e..43e71f4dff3 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/RandomScoreFunction.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import java.io.IOException; import java.util.Objects; /** @@ -68,14 +69,16 @@ public class RandomScoreFunction extends ScoreFunction { return new LeafScoreFunction() { @Override - public double score(int docId, float subQueryScore) { - uidByteData.setDocument(docId); - int hash = StringHelper.murmurhash3_x86_32(uidByteData.valueAt(0), saltedSeed); + public double score(int docId, float subQueryScore) throws IOException { + if (uidByteData.advanceExact(docId) == false) { + throw new AssertionError("Document without a _uid"); + } + int hash = StringHelper.murmurhash3_x86_32(uidByteData.nextValue(), saltedSeed); return (hash & 0x00FFFFFF) / (float)(1 << 24); // only use the lower 24 bits to construct a float from 0.0-1.0 } @Override - public Explanation explainScore(int docId, Explanation subQueryScore) { + public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { return Explanation.match( CombineFunction.toFloat(score(docId, subQueryScore.getValue())), "random score function (seed: " + originalSeed + ")"); diff --git 
a/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java b/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java index 44c6245a812..c69a5efdd53 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/function/WeightFactorFunction.java @@ -52,7 +52,7 @@ public class WeightFactorFunction extends ScoreFunction { final LeafScoreFunction leafFunction = scoreFunction.getLeafScoreFunction(ctx); return new LeafScoreFunction() { @Override - public double score(int docId, float subQueryScore) { + public double score(int docId, float subQueryScore) throws IOException { return leafFunction.score(docId, subQueryScore) * getWeight(); } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java b/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java index 80977618c4b..96c9f30a954 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/uid/PerThreadIDVersionAndSeqNoLookup.java @@ -52,12 +52,7 @@ final class PerThreadIDVersionAndSeqNoLookup { /** terms enum for uid field */ private final TermsEnum termsEnum; - /** _version data */ - private final NumericDocValues versions; - /** _seq_no data */ - private final NumericDocValues seqNos; - /** _primary_term data */ - private final NumericDocValues primaryTerms; + /** Reused for iteration (when the term exists) */ private PostingsEnum docsEnum; @@ -72,30 +67,33 @@ final class PerThreadIDVersionAndSeqNoLookup { Terms terms = fields.terms(UidFieldMapper.NAME); termsEnum = terms.iterator(); if (termsEnum == null) { - throw new IllegalArgumentException("reader misses the [" + UidFieldMapper.NAME + - "] field"); + throw new IllegalArgumentException("reader misses 
the [" + UidFieldMapper.NAME + "] field"); } - versions = reader.getNumericDocValues(VersionFieldMapper.NAME); - if (versions == null) { - throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + - "] field"); + if (reader.getNumericDocValues(VersionFieldMapper.NAME) == null) { + throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field"); } - seqNos = reader.getNumericDocValues(SeqNoFieldMapper.NAME); - primaryTerms = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME); + Object readerKey = null; - assert (readerKey = reader.getCoreCacheKey()) != null; + assert (readerKey = reader.getCoreCacheHelper().getKey()) != null; this.readerKey = readerKey; } /** Return null if id is not found. */ public DocIdAndVersion lookupVersion(BytesRef id, Bits liveDocs, LeafReaderContext context) throws IOException { - assert context.reader().getCoreCacheKey().equals(readerKey) : + assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) : "context's reader is not the same as the reader class was initialized on."; int docID = getDocID(id, liveDocs); if (docID != DocIdSetIterator.NO_MORE_DOCS) { - return new DocIdAndVersion(docID, versions.get(docID), context); + final NumericDocValues versions = context.reader().getNumericDocValues(VersionFieldMapper.NAME); + if (versions == null) { + throw new IllegalArgumentException("reader misses the [" + VersionFieldMapper.NAME + "] field"); + } + if (versions.advanceExact(docID) == false) { + throw new IllegalArgumentException("Document [" + docID + "] misses the [" + VersionFieldMapper.NAME + "] field"); + } + return new DocIdAndVersion(docID, versions.longValue(), context); } else { return null; } @@ -124,11 +122,18 @@ final class PerThreadIDVersionAndSeqNoLookup { /** Return null if id is not found. 
*/ DocIdAndSeqNo lookupSeqNo(BytesRef id, Bits liveDocs, LeafReaderContext context) throws IOException { - assert context.reader().getCoreCacheKey().equals(readerKey) : + assert context.reader().getCoreCacheHelper().getKey().equals(readerKey) : "context's reader is not the same as the reader class was initialized on."; int docID = getDocID(id, liveDocs); if (docID != DocIdSetIterator.NO_MORE_DOCS) { - return new DocIdAndSeqNo(docID, seqNos == null ? SequenceNumbersService.UNASSIGNED_SEQ_NO : seqNos.get(docID), context); + NumericDocValues seqNos = context.reader().getNumericDocValues(SeqNoFieldMapper.NAME); + long seqNo; + if (seqNos != null && seqNos.advanceExact(docID)) { + seqNo = seqNos.longValue(); + } else { + seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO; + } + return new DocIdAndSeqNo(docID, seqNo, context); } else { return null; } @@ -139,7 +144,12 @@ final class PerThreadIDVersionAndSeqNoLookup { * * Note that 0 is an illegal primary term. See {@link org.elasticsearch.cluster.metadata.IndexMetaData#primaryTerm(int)} **/ - long lookUpPrimaryTerm(int docID) throws IOException { - return primaryTerms == null ? 
0 : primaryTerms.get(docID); + long lookUpPrimaryTerm(int docID, LeafReader reader) throws IOException { + NumericDocValues primaryTerms = reader.getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME); + if (primaryTerms != null && primaryTerms.advanceExact(docID)) { + return primaryTerms.longValue(); + } else { + return 0; + } } } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java b/core/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java index 1cbae29a3da..409ce8dec29 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/uid/VersionsAndSeqNoResolver.java @@ -21,7 +21,6 @@ package org.elasticsearch.common.lucene.uid; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.LeafReader.CoreClosedListener; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.util.CloseableThreadLocal; @@ -41,7 +40,7 @@ public final class VersionsAndSeqNoResolver { ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency(); // Evict this reader from lookupStates once it's closed: - private static final CoreClosedListener removeLookupState = key -> { + private static final IndexReader.ClosedListener removeLookupState = key -> { CloseableThreadLocal ctl = lookupStates.remove(key); if (ctl != null) { ctl.close(); @@ -49,15 +48,15 @@ public final class VersionsAndSeqNoResolver { }; private static PerThreadIDVersionAndSeqNoLookup getLookupState(LeafReader reader) throws IOException { - Object key = reader.getCoreCacheKey(); - CloseableThreadLocal ctl = lookupStates.get(key); + IndexReader.CacheHelper cacheHelper = reader.getCoreCacheHelper(); + CloseableThreadLocal ctl = lookupStates.get(cacheHelper.getKey()); if (ctl == null) { // First time we are seeing this reader's core; make a new 
CTL: ctl = new CloseableThreadLocal<>(); - CloseableThreadLocal other = lookupStates.putIfAbsent(key, ctl); + CloseableThreadLocal other = lookupStates.putIfAbsent(cacheHelper.getKey(), ctl); if (other == null) { // Our CTL won, we must remove it when the core is closed: - reader.addCoreClosedListener(removeLookupState); + cacheHelper.addClosedListener(removeLookupState); } else { // Another thread beat us to it: just use their CTL: ctl = other; @@ -161,7 +160,7 @@ public final class VersionsAndSeqNoResolver { public static long loadPrimaryTerm(DocIdAndSeqNo docIdAndSeqNo) throws IOException { LeafReader leaf = docIdAndSeqNo.context.reader(); PerThreadIDVersionAndSeqNoLookup lookup = getLookupState(leaf); - long result = lookup.lookUpPrimaryTerm(docIdAndSeqNo.docId); + long result = lookup.lookUpPrimaryTerm(docIdAndSeqNo.docId, leaf); assert result > 0 : "should always resolve a primary term for a resolved sequence number. primary_term [" + result + "]" + " docId [" + docIdAndSeqNo.docId + "] seqNo [" + docIdAndSeqNo.seqNo + "]"; return result; diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 0e4c54e7a7d..04d2ac47d18 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -21,8 +21,8 @@ package org.elasticsearch.index.cache.bitset; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexReaderContext; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.IndexSearcher; @@ -71,13 +71,13 @@ import java.util.concurrent.Executor; * and require that it should always be around 
should use this cache, otherwise the * {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead. */ -public final class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { +public final class BitsetFilterCache extends AbstractIndexComponent implements IndexReader.ClosedListener, RemovalListener>, Closeable { public static final Setting INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING = Setting.boolSetting("index.load_fixed_bitset_filters_eagerly", true, Property.IndexScope); private final boolean loadRandomAccessFiltersEagerly; - private final Cache> loadedFilters; + private final Cache> loadedFilters; private final Listener listener; public BitsetFilterCache(IndexSettings indexSettings, Listener listener) { @@ -86,7 +86,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L throw new IllegalArgumentException("listener must not be null"); } this.loadRandomAccessFiltersEagerly = this.indexSettings.getValue(INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING); - this.loadedFilters = CacheBuilder.>builder().removalListener(this).build(); + this.loadedFilters = CacheBuilder.>builder().removalListener(this).build(); this.listener = listener; } @@ -100,7 +100,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L } @Override - public void onClose(Object ownerCoreCacheKey) { + public void onClose(IndexReader.CacheKey ownerCoreCacheKey) { loadedFilters.invalidate(ownerCoreCacheKey); } @@ -115,7 +115,11 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L } private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException { - final Object coreCacheReader = context.reader().getCoreCacheKey(); + final IndexReader.CacheHelper cacheHelper = context.reader().getCoreCacheHelper(); + if (cacheHelper == null) { + throw new 
IllegalArgumentException("Reader " + context.reader() + " does not support caching"); + } + final IndexReader.CacheKey coreCacheReader = cacheHelper.getKey(); final ShardId shardId = ShardUtils.extractShardId(context.reader()); if (shardId != null // can't require it because of the percolator && indexSettings.getIndex().equals(shardId.getIndex()) == false) { @@ -124,7 +128,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L + " with cache of index " + indexSettings.getIndex()); } Cache filterToFbs = loadedFilters.computeIfAbsent(coreCacheReader, key -> { - context.reader().addCoreClosedListener(BitsetFilterCache.this); + cacheHelper.addClosedListener(BitsetFilterCache.this); return CacheBuilder.builder().build(); }); @@ -148,7 +152,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L } @Override - public void onRemoval(RemovalNotification> notification) { + public void onRemoval(RemovalNotification> notification) { if (notification.getKey() == null) { return; } @@ -272,7 +276,7 @@ public final class BitsetFilterCache extends AbstractIndexComponent implements L } - Cache> getLoadedFilters() { + Cache> getLoadedFilters() { return loadedFilters; } diff --git a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java index 59be64a85d6..30342848974 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/CodecService.java +++ b/core/src/main/java/org/elasticsearch/index/codec/CodecService.java @@ -22,7 +22,7 @@ package org.elasticsearch.index.codec; import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; -import org.apache.lucene.codecs.lucene62.Lucene62Codec; +import org.apache.lucene.codecs.lucene70.Lucene70Codec; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.collect.MapBuilder; import 
org.elasticsearch.index.mapper.MapperService; @@ -47,8 +47,8 @@ public class CodecService { public CodecService(@Nullable MapperService mapperService, Logger logger) { final MapBuilder codecs = MapBuilder.newMapBuilder(); if (mapperService == null) { - codecs.put(DEFAULT_CODEC, new Lucene62Codec()); - codecs.put(BEST_COMPRESSION_CODEC, new Lucene62Codec(Mode.BEST_COMPRESSION)); + codecs.put(DEFAULT_CODEC, new Lucene70Codec()); + codecs.put(BEST_COMPRESSION_CODEC, new Lucene70Codec(Mode.BEST_COMPRESSION)); } else { codecs.put(DEFAULT_CODEC, new PerFieldMappingPostingFormatCodec(Mode.BEST_SPEED, mapperService, logger)); diff --git a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java index 490c837e85a..bf1e48e7a6b 100644 --- a/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java +++ b/core/src/main/java/org/elasticsearch/index/codec/PerFieldMappingPostingFormatCodec.java @@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.codecs.Codec; import org.apache.lucene.codecs.PostingsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; -import org.apache.lucene.codecs.lucene62.Lucene62Codec; +import org.apache.lucene.codecs.lucene70.Lucene70Codec; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.index.mapper.CompletionFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -38,7 +38,7 @@ import org.elasticsearch.index.mapper.MapperService; * configured for a specific field the default postings format is used. 
*/ // LUCENE UPGRADE: make sure to move to a new codec depending on the lucene version -public class PerFieldMappingPostingFormatCodec extends Lucene62Codec { +public class PerFieldMappingPostingFormatCodec extends Lucene70Codec { private final Logger logger; private final MapperService mapperService; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/AbstractBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractBinaryDocValues.java new file mode 100644 index 00000000000..a3ce33a4e6d --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractBinaryDocValues.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.fielddata; + +import org.apache.lucene.index.BinaryDocValues; +import org.apache.lucene.search.DocIdSetIterator; + +import java.io.IOException; + +/** + * Base implementation that throws an {@link IOException} for the + * {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and + * aggregations, which only use {@link #advanceExact(int)} and + * {@link #binaryValue()}. 
+ */ +public abstract class AbstractBinaryDocValues extends BinaryDocValues { + + @Override + public int docID() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextDoc() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/AbstractNumericDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractNumericDocValues.java new file mode 100644 index 00000000000..21c96f30902 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractNumericDocValues.java @@ -0,0 +1,50 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.fielddata; + +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.search.DocIdSetIterator; + +import java.io.IOException; + +/** + * Base implementation that throws an {@link IOException} for the + * {@link DocIdSetIterator} APIs. 
This impl is safe to use for sorting and + * aggregations, which only use {@link #advanceExact(int)} and + * {@link #longValue()}. + */ +public abstract class AbstractNumericDocValues extends NumericDocValues { + + @Override + public int nextDoc() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/AbstractRandomAccessOrds.java b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedDocValues.java similarity index 55% rename from core/src/main/java/org/elasticsearch/index/fielddata/AbstractRandomAccessOrds.java rename to core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedDocValues.java index ef8954b97e1..b1ace7e932b 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/AbstractRandomAccessOrds.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedDocValues.java @@ -19,30 +19,32 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.search.DocIdSetIterator; + +import java.io.IOException; /** - * Base implementation of a {@link RandomAccessOrds} instance. + * Base implementation that throws an {@link IOException} for the + * {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and + * aggregations, which only use {@link #advanceExact(int)} and + * {@link #ordValue()}. 
*/ -public abstract class AbstractRandomAccessOrds extends RandomAccessOrds { - - int i = 0; - - protected abstract void doSetDocument(int docID); +public abstract class AbstractSortedDocValues extends SortedDocValues { @Override - public final void setDocument(int docID) { - doSetDocument(docID); - i = 0; + public int nextDoc() throws IOException { + throw new UnsupportedOperationException(); } @Override - public long nextOrd() { - if (i < cardinality()) { - return ordAt(i++); - } else { - return NO_MORE_ORDS; - } + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedNumericDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedNumericDocValues.java new file mode 100644 index 00000000000..c7f78147797 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedNumericDocValues.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.fielddata; + +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.DocIdSetIterator; + +import java.io.IOException; + +/** + * Base implementation that throws an {@link IOException} for the + * {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and + * aggregations, which only use {@link #advanceExact(int)} and + * {@link #docValueCount()} and {@link #nextValue()}. + */ +public abstract class AbstractSortedNumericDocValues extends SortedNumericDocValues { + + @Override + public int docID() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextDoc() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedSetDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedSetDocValues.java new file mode 100644 index 00000000000..fedf815f261 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortedSetDocValues.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.fielddata; + +import org.apache.lucene.index.SortedSetDocValues; +import org.apache.lucene.search.DocIdSetIterator; + +import java.io.IOException; + +/** + * Base implementation that throws an {@link IOException} for the + * {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and + * aggregations, which only use {@link #advanceExact(int)} and + * {@link #getValueCount()} and {@link #nextOrd()} and {@link #lookupOrd(long)}. + */ +public abstract class AbstractSortedSetDocValues extends SortedSetDocValues { + + @Override + public int docID() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextDoc() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); + } + + +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortingNumericDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortingNumericDocValues.java new file mode 100644 index 00000000000..ad4b8c08cc9 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/fielddata/AbstractSortingNumericDocValues.java @@ -0,0 +1,54 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.fielddata; + +import org.apache.lucene.search.DocIdSetIterator; + +import java.io.IOException; + +/** + * Base implementation that throws an {@link IOException} for the + * {@link DocIdSetIterator} APIs. This impl is safe to use for sorting and + * aggregations, which only use {@link #advanceExact(int)} and + * {@link #docValueCount()} and {@link #nextValue()}. + */ +public abstract class AbstractSortingNumericDocValues extends SortingNumericDocValues { + + @Override + public int docID() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextDoc() throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) throws IOException { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); + } + +} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/AtomicOrdinalsFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/AtomicOrdinalsFieldData.java index 8d1fd310571..14ed16ecb3e 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/AtomicOrdinalsFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/AtomicOrdinalsFieldData.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.index.RandomAccessOrds; 
+import org.apache.lucene.index.SortedSetDocValues; /** * Specialization of {@link AtomicFieldData} for data that is indexed with @@ -30,6 +30,6 @@ public interface AtomicOrdinalsFieldData extends AtomicFieldData { /** * Return the ordinals values for the current atomic reader. */ - RandomAccessOrds getOrdinalsValues(); + SortedSetDocValues getOrdinalsValues(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java index fce29c09ba4..a1ac341b56f 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/FieldData.java @@ -22,15 +22,13 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.geo.GeoPoint; +import java.io.IOException; import java.util.ArrayList; import java.util.List; @@ -43,8 +41,8 @@ public enum FieldData { /** * Return a {@link SortedBinaryDocValues} that doesn't contain any value. 
*/ - public static SortedBinaryDocValues emptySortedBinary(int maxDoc) { - return singleton(DocValues.emptyBinary(), new Bits.MatchNoBits(maxDoc)); + public static SortedBinaryDocValues emptySortedBinary() { + return singleton(DocValues.emptyBinary()); } /** @@ -53,8 +51,13 @@ public enum FieldData { public static NumericDoubleValues emptyNumericDouble() { return new NumericDoubleValues() { @Override - public double get(int docID) { - return 0; + public boolean advanceExact(int doc) throws IOException { + return false; + } + + @Override + public double doubleValue() throws IOException { + throw new UnsupportedOperationException(); } }; @@ -63,16 +66,20 @@ public enum FieldData { /** * Return a {@link SortedNumericDoubleValues} that doesn't contain any value. */ - public static SortedNumericDoubleValues emptySortedNumericDoubles(int maxDoc) { - return singleton(emptyNumericDouble(), new Bits.MatchNoBits(maxDoc)); + public static SortedNumericDoubleValues emptySortedNumericDoubles() { + return singleton(emptyNumericDouble()); } public static GeoPointValues emptyGeoPoint() { - final GeoPoint point = new GeoPoint(); return new GeoPointValues() { @Override - public GeoPoint get(int docID) { - return point; + public boolean advanceExact(int doc) throws IOException { + return false; + } + + @Override + public GeoPoint geoPointValue() { + throw new UnsupportedOperationException(); } }; } @@ -80,68 +87,123 @@ public enum FieldData { /** * Return a {@link SortedNumericDoubleValues} that doesn't contain any value. */ - public static MultiGeoPointValues emptyMultiGeoPoints(int maxDoc) { - return singleton(emptyGeoPoint(), new Bits.MatchNoBits(maxDoc)); + public static MultiGeoPointValues emptyMultiGeoPoints() { + return singleton(emptyGeoPoint()); } /** * Returns a {@link Bits} representing all documents from dv that have a value. 
*/ public static Bits docsWithValue(final SortedBinaryDocValues dv, final int maxDoc) { - return new Bits() { - @Override - public boolean get(int index) { - dv.setDocument(index); - return dv.count() != 0; - } + return new Bits() { + @Override + public boolean get(int index) { + try { + return dv.advanceExact(index); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - @Override - public int length() { - return maxDoc; - } - }; + @Override + public int length() { + return maxDoc; + } + }; } /** - * Returns a Bits representing all documents from dv that have a value. + * Returns a {@link Bits} representing all documents from dv + * that have a value. + */ + public static Bits docsWithValue(final SortedSetDocValues dv, final int maxDoc) { + return new Bits() { + @Override + public boolean get(int index) { + try { + return dv.advanceExact(index); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public int length() { + return maxDoc; + } + }; + } + + /** + * Returns a Bits representing all documents from dv that have + * a value. */ public static Bits docsWithValue(final MultiGeoPointValues dv, final int maxDoc) { - return new Bits() { - @Override - public boolean get(int index) { - dv.setDocument(index); - return dv.count() != 0; - } + return new Bits() { + @Override + public boolean get(int index) { + try { + return dv.advanceExact(index); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - @Override - public int length() { - return maxDoc; - } - }; + @Override + public int length() { + return maxDoc; + } + }; } /** * Returns a Bits representing all documents from dv that have a value. 
*/ public static Bits docsWithValue(final SortedNumericDoubleValues dv, final int maxDoc) { - return new Bits() { - @Override - public boolean get(int index) { - dv.setDocument(index); - return dv.count() != 0; - } + return new Bits() { + @Override + public boolean get(int index) { + try { + return dv.advanceExact(index); + } catch (IOException e) { + throw new RuntimeException(e); + } + } - @Override - public int length() { - return maxDoc; - } - }; + @Override + public int length() { + return maxDoc; + } + }; } /** - * Given a {@link SortedNumericDoubleValues}, return a {@link SortedNumericDocValues} - * instance that will translate double values to sortable long bits using - * {@link NumericUtils#doubleToSortableLong(double)}. + * Returns a Bits representing all documents from dv that have + * a value. + */ + public static Bits docsWithValue(final SortedNumericDocValues dv, final int maxDoc) { + return new Bits() { + @Override + public boolean get(int index) { + try { + return dv.advanceExact(index); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Override + public int length() { + return maxDoc; + } + }; + } + + /** + * Given a {@link SortedNumericDoubleValues}, return a + * {@link SortedNumericDocValues} instance that will translate double values + * to sortable long bits using + * {@link org.apache.lucene.util.NumericUtils#doubleToSortableLong(double)}. 
*/ public static SortedNumericDocValues toSortableLongBits(SortedNumericDoubleValues values) { final NumericDoubleValues singleton = unwrapSingleton(values); @@ -152,8 +214,7 @@ public enum FieldData { } else { longBits = new SortableLongBitsNumericDocValues(singleton); } - final Bits docsWithField = unwrapSingletonBits(values); - return DocValues.singleton(longBits, docsWithField); + return DocValues.singleton(longBits); } else { if (values instanceof SortableLongBitsToSortedNumericDoubleValues) { return ((SortableLongBitsToSortedNumericDoubleValues) values).getLongValues(); @@ -166,7 +227,7 @@ public enum FieldData { /** * Given a {@link SortedNumericDocValues}, return a {@link SortedNumericDoubleValues} * instance that will translate long values to doubles using - * {@link NumericUtils#sortableLongToDouble(long)}. + * {@link org.apache.lucene.util.NumericUtils#sortableLongToDouble(long)}. */ public static SortedNumericDoubleValues sortableLongBitsToDoubles(SortedNumericDocValues values) { final NumericDocValues singleton = DocValues.unwrapSingleton(values); @@ -177,8 +238,7 @@ public enum FieldData { } else { doubles = new SortableLongBitsToNumericDoubleValues(singleton); } - final Bits docsWithField = DocValues.unwrapSingletonBits(values); - return singleton(doubles, docsWithField); + return singleton(doubles); } else { if (values instanceof SortableLongBitsSortedNumericDocValues) { return ((SortableLongBitsSortedNumericDocValues) values).getDoubleValues(); @@ -194,8 +254,7 @@ public enum FieldData { public static SortedNumericDoubleValues castToDouble(final SortedNumericDocValues values) { final NumericDocValues singleton = DocValues.unwrapSingleton(values); if (singleton != null) { - final Bits docsWithField = DocValues.unwrapSingletonBits(values); - return singleton(new DoubleCastedValues(singleton), docsWithField); + return singleton(new DoubleCastedValues(singleton)); } else { return new SortedDoubleCastedValues(values); } @@ -207,8 +266,7 @@ public enum 
FieldData { public static SortedNumericDocValues castToLong(final SortedNumericDoubleValues values) { final NumericDoubleValues singleton = unwrapSingleton(values); if (singleton != null) { - final Bits docsWithField = unwrapSingletonBits(values); - return DocValues.singleton(new LongCastedValues(singleton), docsWithField); + return DocValues.singleton(new LongCastedValues(singleton)); } else { return new SortedLongCastedValues(values); } @@ -217,15 +275,14 @@ public enum FieldData { /** * Returns a multi-valued view over the provided {@link NumericDoubleValues}. */ - public static SortedNumericDoubleValues singleton(NumericDoubleValues values, Bits docsWithField) { - return new SingletonSortedNumericDoubleValues(values, docsWithField); + public static SortedNumericDoubleValues singleton(NumericDoubleValues values) { + return new SingletonSortedNumericDoubleValues(values); } /** * Returns a single-valued view of the {@link SortedNumericDoubleValues}, - * if it was previously wrapped with {@link DocValues#singleton(NumericDocValues, Bits)}, + * if it was previously wrapped with {@link DocValues#singleton(NumericDocValues)}, * or null. - * @see DocValues#unwrapSingletonBits(SortedNumericDocValues) */ public static NumericDoubleValues unwrapSingleton(SortedNumericDoubleValues values) { if (values instanceof SingletonSortedNumericDoubleValues) { @@ -234,31 +291,17 @@ public enum FieldData { return null; } - /** - * Returns the documents with a value for the {@link SortedNumericDoubleValues}, - * if it was previously wrapped with {@link #singleton(NumericDoubleValues, Bits)}, - * or null. - */ - public static Bits unwrapSingletonBits(SortedNumericDoubleValues dv) { - if (dv instanceof SingletonSortedNumericDoubleValues) { - return ((SingletonSortedNumericDoubleValues)dv).getDocsWithField(); - } else { - return null; - } - } - /** * Returns a multi-valued view over the provided {@link GeoPointValues}. 
*/ - public static MultiGeoPointValues singleton(GeoPointValues values, Bits docsWithField) { - return new SingletonMultiGeoPointValues(values, docsWithField); + public static MultiGeoPointValues singleton(GeoPointValues values) { + return new SingletonMultiGeoPointValues(values); } /** * Returns a single-valued view of the {@link MultiGeoPointValues}, - * if it was previously wrapped with {@link #singleton(GeoPointValues, Bits)}, + * if it was previously wrapped with {@link #singleton(GeoPointValues)}, * or null. - * @see #unwrapSingletonBits(MultiGeoPointValues) */ public static GeoPointValues unwrapSingleton(MultiGeoPointValues values) { if (values instanceof SingletonMultiGeoPointValues) { @@ -267,30 +310,17 @@ public enum FieldData { return null; } - /** - * Returns the documents with a value for the {@link MultiGeoPointValues}, - * if it was previously wrapped with {@link #singleton(GeoPointValues, Bits)}, - * or null. - */ - public static Bits unwrapSingletonBits(MultiGeoPointValues values) { - if (values instanceof SingletonMultiGeoPointValues) { - return ((SingletonMultiGeoPointValues) values).getDocsWithField(); - } - return null; - } - /** * Returns a multi-valued view over the provided {@link BinaryDocValues}. */ - public static SortedBinaryDocValues singleton(BinaryDocValues values, Bits docsWithField) { - return new SingletonSortedBinaryDocValues(values, docsWithField); + public static SortedBinaryDocValues singleton(BinaryDocValues values) { + return new SingletonSortedBinaryDocValues(values); } /** * Returns a single-valued view of the {@link SortedBinaryDocValues}, - * if it was previously wrapped with {@link #singleton(BinaryDocValues, Bits)}, + * if it was previously wrapped with {@link #singleton(BinaryDocValues)}, * or null. 
- * @see #unwrapSingletonBits(SortedBinaryDocValues) */ public static BinaryDocValues unwrapSingleton(SortedBinaryDocValues values) { if (values instanceof SingletonSortedBinaryDocValues) { @@ -299,18 +329,6 @@ public enum FieldData { return null; } - /** - * Returns the documents with a value for the {@link SortedBinaryDocValues}, - * if it was previously wrapped with {@link #singleton(BinaryDocValues, Bits)}, - * or null. - */ - public static Bits unwrapSingletonBits(SortedBinaryDocValues values) { - if (values instanceof SingletonSortedBinaryDocValues) { - return ((SingletonSortedBinaryDocValues) values).getDocsWithField(); - } - return null; - } - /** * Returns whether the provided values *might* be multi-valued. There is no * guarantee that this method will return false in the single-valued case. @@ -359,10 +377,13 @@ public enum FieldData { public static SortedBinaryDocValues toString(final SortedNumericDocValues values) { return toString(new ToStringValues() { @Override - public void get(int docID, List list) { - values.setDocument(docID); - for (int i = 0, count = values.count(); i < count; ++i) { - list.add(Long.toString(values.valueAt(i))); + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + @Override + public void get(List list) throws IOException { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + list.add(Long.toString(values.nextValue())); } } }); @@ -376,10 +397,13 @@ public enum FieldData { public static SortedBinaryDocValues toString(final SortedNumericDoubleValues values) { return toString(new ToStringValues() { @Override - public void get(int docID, List list) { - values.setDocument(docID); - for (int i = 0, count = values.count(); i < count; ++i) { - list.add(Double.toString(values.valueAt(i))); + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + @Override + public void get(List list) throws IOException { + for (int i = 0, count 
= values.docValueCount(); i < count; ++i) { + list.add(Double.toString(values.nextValue())); } } }); @@ -390,23 +414,37 @@ public enum FieldData { * typically used for scripts or for the `map` execution mode of terms aggs. * NOTE: this is slow! */ - public static SortedBinaryDocValues toString(final RandomAccessOrds values) { + public static SortedBinaryDocValues toString(final SortedSetDocValues values) { return new SortedBinaryDocValues() { + private int count = 0; @Override - public BytesRef valueAt(int index) { - return values.lookupOrd(values.ordAt(index)); + public boolean advanceExact(int doc) throws IOException { + if (values.advanceExact(doc) == false) { + return false; + } + for (int i = 0; ; ++i) { + if (values.nextOrd() == SortedSetDocValues.NO_MORE_ORDS) { + count = i; + break; + } + } + // reset the iterator on the current doc + boolean advanced = values.advanceExact(doc); + assert advanced; + return true; } @Override - public void setDocument(int docId) { - values.setDocument(docId); + public int docValueCount() { + return count; } @Override - public int count() { - return values.cardinality(); + public BytesRef nextValue() throws IOException { + return values.lookupOrd(values.nextOrd()); } + }; } @@ -418,78 +456,30 @@ public enum FieldData { public static SortedBinaryDocValues toString(final MultiGeoPointValues values) { return toString(new ToStringValues() { @Override - public void get(int docID, List list) { - values.setDocument(docID); - for (int i = 0, count = values.count(); i < count; ++i) { - list.add(values.valueAt(i).toString()); + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); + } + @Override + public void get(List list) throws IOException { + for (int i = 0, count = values.docValueCount(); i < count; ++i) { + list.add(values.nextValue().toString()); } } }); } - /** - * If dv is an instance of {@link RandomAccessOrds}, then return - * it, otherwise wrap it into a slow wrapper that implements 
random access. - */ - public static RandomAccessOrds maybeSlowRandomAccessOrds(final SortedSetDocValues dv) { - if (dv instanceof RandomAccessOrds) { - return (RandomAccessOrds) dv; - } else { - assert DocValues.unwrapSingleton(dv) == null : "this method expect singleton to return random-access ords"; - return new RandomAccessOrds() { - - int cardinality; - long[] ords = new long[0]; - int ord; - - @Override - public void setDocument(int docID) { - cardinality = 0; - dv.setDocument(docID); - for (long ord = dv.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = dv.nextOrd()) { - ords = ArrayUtil.grow(ords, cardinality + 1); - ords[cardinality++] = ord; - } - ord = 0; - } - - @Override - public long nextOrd() { - return ords[ord++]; - } - - @Override - public BytesRef lookupOrd(long ord) { - return dv.lookupOrd(ord); - } - - @Override - public long getValueCount() { - return dv.getValueCount(); - } - - @Override - public long ordAt(int index) { - return ords[index]; - } - - @Override - public int cardinality() { - return cardinality; - } - }; - } - } - private static SortedBinaryDocValues toString(final ToStringValues toStringValues) { return new SortingBinaryDocValues() { final List list = new ArrayList<>(); @Override - public void setDocument(int docID) { + public boolean advanceExact(int docID) throws IOException { + if (toStringValues.advanceExact(docID) == false) { + return false; + } list.clear(); - toStringValues.get(docID, list); + toStringValues.get(list); count = list.size(); grow(); for (int i = 0; i < count; ++i) { @@ -497,6 +487,7 @@ public enum FieldData { values[i].copyChars(s); } sort(); + return true; } }; @@ -504,7 +495,14 @@ public enum FieldData { private interface ToStringValues { - void get(int docID, List values); + /** + * Advance this instance to the given document id + * @return true if there is a value for this document + */ + boolean advanceExact(int doc) throws IOException; + + /** Fill the list of charsquences with the list of 
values for the current document. */ + void get(List values) throws IOException; } @@ -517,8 +515,13 @@ public enum FieldData { } @Override - public double get(int docID) { - return values.get(docID); + public double doubleValue() throws IOException { + return values.longValue(); + } + + @Override + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); } } @@ -532,38 +535,49 @@ public enum FieldData { } @Override - public double valueAt(int index) { - return values.valueAt(index); + public boolean advanceExact(int target) throws IOException { + return values.advanceExact(target); } @Override - public void setDocument(int doc) { - values.setDocument(doc); + public double nextValue() throws IOException { + return values.nextValue(); } @Override - public int count() { - return values.count(); + public int docValueCount() { + return values.docValueCount(); } } - private static class LongCastedValues extends NumericDocValues { + private static class LongCastedValues extends AbstractNumericDocValues { private final NumericDoubleValues values; + private int docID = -1; LongCastedValues(NumericDoubleValues values) { this.values = values; } @Override - public long get(int docID) { - return (long) values.get(docID); + public boolean advanceExact(int target) throws IOException { + docID = target; + return values.advanceExact(target); } + @Override + public long longValue() throws IOException { + return (long) values.doubleValue(); + } + + @Override + public int docID() { + return docID; + } } - private static class SortedLongCastedValues extends SortedNumericDocValues { + private static class SortedLongCastedValues extends AbstractSortedNumericDocValues { private final SortedNumericDoubleValues values; @@ -572,18 +586,18 @@ public enum FieldData { } @Override - public long valueAt(int index) { - return (long) values.valueAt(index); + public boolean advanceExact(int target) throws IOException { + return values.advanceExact(target); } 
@Override - public void setDocument(int doc) { - values.setDocument(doc); + public int docValueCount() { + return values.docValueCount(); } @Override - public int count() { - return values.count(); + public long nextValue() throws IOException { + return (long) values.nextValue(); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/GeoPointValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/GeoPointValues.java index 8e8c8e67c9b..8a679f18cfd 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/GeoPointValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/GeoPointValues.java @@ -21,17 +21,23 @@ package org.elasticsearch.index.fielddata; import org.elasticsearch.common.geo.GeoPoint; +import java.io.IOException; + /** * Per-document geo-point values. */ public abstract class GeoPointValues { /** - * Get the {@link GeoPoint} associated with docID. - * The returned {@link GeoPoint} might be reused across calls. - * If the given docID does not have a value then the returned - * geo point mught have both latitude and longitude set to 0. + * Advance this instance to the given document id + * @return true if there is a value for this document */ - public abstract GeoPoint get(int docID); + public abstract boolean advanceExact(int doc) throws IOException; + + /** + * Get the {@link GeoPoint} associated with the current document. + * The returned {@link GeoPoint} might be reused across calls. 
+ */ + public abstract GeoPoint geoPointValue(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/MultiGeoPointValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/MultiGeoPointValues.java index 6fa9c799dd7..c80c337c6d0 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/MultiGeoPointValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/MultiGeoPointValues.java @@ -20,6 +20,8 @@ package org.elasticsearch.index.fielddata; import org.elasticsearch.common.geo.GeoPoint; +import java.io.IOException; + /** * A stateful lightweight per document set of {@link GeoPoint} values. * To iterate over values in a document use the following pattern: @@ -44,28 +46,24 @@ public abstract class MultiGeoPointValues { } /** - * Sets iteration to the specified docID. - * @param docId document ID - * - * @see #valueAt(int) - * @see #count() + * Advance this instance to the given document id + * @return true if there is a value for this document */ - public abstract void setDocument(int docId); + public abstract boolean advanceExact(int doc) throws IOException; /** * Return the number of geo points the current document has. */ - public abstract int count(); + public abstract int docValueCount(); /** - * Return the i-th value associated with the current document. - * Behavior is undefined when i is undefined or greater than - * or equal to {@link #count()}. + * Return the next value associated with the current document. This must not be + * called more than {@link #docValueCount()} times. * * Note: the returned {@link GeoPoint} might be shared across invocations. * - * @return the next value for the current docID set to {@link #setDocument(int)}. + * @return the next value for the current docID set to {@link #advanceExact(int)}. 
*/ - public abstract GeoPoint valueAt(int i); + public abstract GeoPoint nextValue() throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/NumericDoubleValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/NumericDoubleValues.java index 2cbbb0064f4..0d78d3c7905 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/NumericDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/NumericDoubleValues.java @@ -20,43 +20,58 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.search.DoubleValues; + +import java.io.IOException; /** * A per-document numeric value. */ -public abstract class NumericDoubleValues { +public abstract class NumericDoubleValues extends DoubleValues { /** Sole constructor. (For invocation by subclass * constructors, typically implicit.) */ protected NumericDoubleValues() {} - - /** - * Returns the numeric value for the specified document ID. This must return - * 0d if the given doc ID has no value. - * @param docID document ID to lookup - * @return numeric value - */ - public abstract double get(int docID); // TODO: this interaction with sort comparators is really ugly... /** Returns numeric docvalues view of raw double bits */ public NumericDocValues getRawDoubleValues() { - return new NumericDocValues() { - @Override - public long get(int docID) { - return Double.doubleToRawLongBits(NumericDoubleValues.this.get(docID)); - } + return new AbstractNumericDocValues() { + private int docID = -1; + @Override + public boolean advanceExact(int target) throws IOException { + docID = target; + return NumericDoubleValues.this.advanceExact(target); + } + @Override + public long longValue() throws IOException { + return Double.doubleToRawLongBits(NumericDoubleValues.this.doubleValue()); + } + @Override + public int docID() { + return docID; + } }; } // yes... this is doing what the previous code was doing... 
/** Returns numeric docvalues view of raw float bits */ public NumericDocValues getRawFloatValues() { - return new NumericDocValues() { - @Override - public long get(int docID) { - return Float.floatToRawIntBits((float)NumericDoubleValues.this.get(docID)); - } + return new AbstractNumericDocValues() { + private int docID = -1; + @Override + public boolean advanceExact(int target) throws IOException { + docID = target; + return NumericDoubleValues.this.advanceExact(target); + } + @Override + public long longValue() throws IOException { + return Float.floatToRawIntBits((float)NumericDoubleValues.this.doubleValue()); + } + @Override + public int docID() { + return docID; + } }; } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java index bfa23eb8f04..339e70c50b1 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ScriptDocValues.java @@ -21,7 +21,9 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; @@ -32,7 +34,9 @@ import org.joda.time.DateTimeZone; import org.joda.time.MutableDateTime; import org.joda.time.ReadableDateTime; +import java.io.IOException; import java.util.AbstractList; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.function.UnaryOperator; @@ -46,7 +50,7 @@ public abstract class ScriptDocValues extends AbstractList { /** * Set the current doc ID. 
*/ - public abstract void setNextDocId(int docId); + public abstract void setNextDocId(int docId) throws IOException; /** * Return a copy of the list of the values for the current document. @@ -83,24 +87,48 @@ public abstract class ScriptDocValues extends AbstractList { public static final class Strings extends ScriptDocValues { - private final SortedBinaryDocValues values; + private final SortedBinaryDocValues in; + private BytesRefBuilder[] values = new BytesRefBuilder[0]; + private int count; - public Strings(SortedBinaryDocValues values) { - this.values = values; + public Strings(SortedBinaryDocValues in) { + this.in = in; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); + public void setNextDocId(int docId) throws IOException { + if (in.advanceExact(docId)) { + resize(in.docValueCount()); + for (int i = 0; i < count; i++) { + values[i].copyBytes(in.nextValue()); + } + } else { + resize(0); + } + } + + /** + * Set the {@link #size()} and ensure that the {@link #values} array can + * store at least that many entries. 
+ */ + protected void resize(int newSize) { + count = newSize; + if (newSize > values.length) { + final int oldLength = values.length; + values = ArrayUtil.grow(values, count); + for (int i = oldLength; i < values.length; ++i) { + values[i] = new BytesRefBuilder(); + } + } } public SortedBinaryDocValues getInternalValues() { - return this.values; + return this.in; } public BytesRef getBytesValue() { - if (values.count() > 0) { - return values.valueAt(0); + if (size() > 0) { + return values[0].get(); } else { return null; } @@ -117,12 +145,12 @@ public abstract class ScriptDocValues extends AbstractList { @Override public String get(int index) { - return values.valueAt(index).utf8ToString(); + return values[index].get().utf8ToString(); } @Override public int size() { - return values.count(); + return count; } } @@ -130,61 +158,81 @@ public abstract class ScriptDocValues extends AbstractList { public static final class Longs extends ScriptDocValues { protected static final DeprecationLogger deprecationLogger = new DeprecationLogger(ESLoggerFactory.getLogger(Longs.class)); - private final SortedNumericDocValues values; + private final SortedNumericDocValues in; + private long[] values = new long[0]; + private int count; private Dates dates; + private int docId = -1; + + public Longs(SortedNumericDocValues in) { + this.in = in; - public Longs(SortedNumericDocValues values) { - this.values = values; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); - if (dates != null) { - dates.refreshArray(); + public void setNextDocId(int docId) throws IOException { + this.docId = docId; + if (in.advanceExact(docId)) { + resize(in.docValueCount()); + for (int i = 0; i < count; i++) { + values[i] = in.nextValue(); + } + } else { + resize(0); } + if (dates != null) { + dates.setNextDocId(docId); + } + } + + /** + * Set the {@link #size()} and ensure that the {@link #values} array can + * store at least that many entries. 
+ */ + protected void resize(int newSize) { + count = newSize; + values = ArrayUtil.grow(values, count); } public SortedNumericDocValues getInternalValues() { - return this.values; + return this.in; } public long getValue() { - int numValues = values.count(); - if (numValues == 0) { + if (count == 0) { return 0L; } - return values.valueAt(0); + return values[0]; } @Deprecated - public ReadableDateTime getDate() { + public ReadableDateTime getDate() throws IOException { deprecationLogger.deprecated("getDate on numeric fields is deprecated. Use a date field to get dates."); if (dates == null) { - dates = new Dates(values); - dates.refreshArray(); + dates = new Dates(in); + dates.setNextDocId(docId); } return dates.getValue(); } @Deprecated - public List getDates() { + public List getDates() throws IOException { deprecationLogger.deprecated("getDates on numeric fields is deprecated. Use a date field to get dates."); if (dates == null) { - dates = new Dates(values); - dates.refreshArray(); + dates = new Dates(in); + dates.setNextDocId(docId); } return dates; } @Override public Long get(int index) { - return values.valueAt(index); + return values[index]; } @Override public int size() { - return values.count(); + return count; } } @@ -193,22 +241,24 @@ public abstract class ScriptDocValues extends AbstractList { private static final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC); - private final SortedNumericDocValues values; + private final SortedNumericDocValues in; /** * Values wrapped in {@link MutableDateTime}. Null by default an allocated on first usage so we allocate a reasonably size. We keep * this array so we don't have allocate new {@link MutableDateTime}s on every usage. Instead we reuse them for every document. 
*/ private MutableDateTime[] dates; + private int count; - public Dates(SortedNumericDocValues values) { - this.values = values; + public Dates(SortedNumericDocValues in) { + this.in = in; } /** - * Fetch the first field value or 0 millis after epoch if there are no values. + * Fetch the first field value or 0 millis after epoch if there are no + * in. */ public ReadableDateTime getValue() { - if (values.count() == 0) { + if (count == 0) { return EPOCH; } return get(0); @@ -234,113 +284,159 @@ public abstract class ScriptDocValues extends AbstractList { @Override public ReadableDateTime get(int index) { - if (index >= values.count()) { + if (index >= count) { throw new IndexOutOfBoundsException( - "attempted to fetch the [" + index + "] date when there are only [" + values.count() + "] dates."); + "attempted to fetch the [" + index + "] date when there are only [" + + count + "] dates."); } return dates[index]; } @Override public int size() { - return values.count(); + return count; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); + public void setNextDocId(int docId) throws IOException { + if (in.advanceExact(docId)) { + count = in.docValueCount(); + } else { + count = 0; + } refreshArray(); } /** * Refresh the backing array. Package private so it can be called when {@link Longs} loads dates. */ - void refreshArray() { - if (values.count() == 0) { + void refreshArray() throws IOException { + if (count == 0) { return; } if (dates == null) { // Happens for the document. We delay allocating dates so we can allocate it with a reasonable size. 
- dates = new MutableDateTime[values.count()]; + dates = new MutableDateTime[count]; for (int i = 0; i < dates.length; i++) { - dates[i] = new MutableDateTime(values.valueAt(i), DateTimeZone.UTC); + dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC); } return; } - if (values.count() > dates.length) { + if (count > dates.length) { // Happens when we move to a new document and it has more dates than any documents before it. MutableDateTime[] backup = dates; - dates = new MutableDateTime[values.count()]; + dates = new MutableDateTime[count]; System.arraycopy(backup, 0, dates, 0, backup.length); for (int i = 0; i < backup.length; i++) { - dates[i].setMillis(values.valueAt(i)); + dates[i].setMillis(in.nextValue()); } for (int i = backup.length; i < dates.length; i++) { - dates[i] = new MutableDateTime(values.valueAt(i), DateTimeZone.UTC); + dates[i] = new MutableDateTime(in.nextValue(), DateTimeZone.UTC); } return; } - for (int i = 0; i < values.count(); i++) { - dates[i].setMillis(values.valueAt(i)); + for (int i = 0; i < count; i++) { + dates[i].setMillis(in.nextValue()); } } } public static final class Doubles extends ScriptDocValues { - private final SortedNumericDoubleValues values; + private final SortedNumericDoubleValues in; + private double[] values = new double[0]; + private int count; - public Doubles(SortedNumericDoubleValues values) { - this.values = values; + public Doubles(SortedNumericDoubleValues in) { + this.in = in; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); + public void setNextDocId(int docId) throws IOException { + if (in.advanceExact(docId)) { + resize(in.docValueCount()); + for (int i = 0; i < count; i++) { + values[i] = in.nextValue(); + } + } else { + resize(0); + } + } + + /** + * Set the {@link #size()} and ensure that the {@link #values} array can + * store at least that many entries. 
+ */ + protected void resize(int newSize) { + count = newSize; + values = ArrayUtil.grow(values, count); } public SortedNumericDoubleValues getInternalValues() { - return this.values; + return this.in; } public double getValue() { - int numValues = values.count(); - if (numValues == 0) { + if (count == 0) { return 0d; } - return values.valueAt(0); + return values[0]; } @Override public Double get(int index) { - return values.valueAt(index); + return values[index]; } @Override public int size() { - return values.count(); + return count; } } public static final class GeoPoints extends ScriptDocValues { - private final MultiGeoPointValues values; + private final MultiGeoPointValues in; + private GeoPoint[] values = new GeoPoint[0]; + private int count; - public GeoPoints(MultiGeoPointValues values) { - this.values = values; + public GeoPoints(MultiGeoPointValues in) { + this.in = in; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); + public void setNextDocId(int docId) throws IOException { + if (in.advanceExact(docId)) { + resize(in.docValueCount()); + for (int i = 0; i < count; i++) { + GeoPoint point = in.nextValue(); + values[i].reset(point.lat(), point.lon()); + } + } else { + resize(0); + } + } + + /** + * Set the {@link #size()} and ensure that the {@link #values} array can + * store at least that many entries. 
+ */ + protected void resize(int newSize) { + count = newSize; + if (newSize > values.length) { + int oldLength = values.length; + values = ArrayUtil.grow(values, count); + for (int i = oldLength; i < values.length; ++i) { + values[i] = new GeoPoint(); + } + } } public GeoPoint getValue() { - int numValues = values.count(); - if (numValues == 0) { + if (count == 0) { return null; } - return values.valueAt(0); + return values[0]; } public double getLat() { @@ -371,13 +467,13 @@ public abstract class ScriptDocValues extends AbstractList { @Override public GeoPoint get(int index) { - final GeoPoint point = values.valueAt(index); + final GeoPoint point = values[index]; return new GeoPoint(point.lat(), point.lon()); } @Override public int size() { - return values.count(); + return count; } public double arcDistance(double lat, double lon) { @@ -420,66 +516,114 @@ public abstract class ScriptDocValues extends AbstractList { public static final class Booleans extends ScriptDocValues { - private final SortedNumericDocValues values; + private final SortedNumericDocValues in; + private boolean[] values = new boolean[0]; + private int count; - public Booleans(SortedNumericDocValues values) { - this.values = values; + public Booleans(SortedNumericDocValues in) { + this.in = in; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); + public void setNextDocId(int docId) throws IOException { + if (in.advanceExact(docId)) { + resize(in.docValueCount()); + for (int i = 0; i < count; i++) { + values[i] = in.nextValue() == 1; + } + } else { + resize(0); + } + } + + /** + * Set the {@link #size()} and ensure that the {@link #values} array can + * store at least that many entries. 
+ */ + protected void resize(int newSize) { + count = newSize; + values = grow(values, count); } public boolean getValue() { - return values.count() != 0 && values.valueAt(0) == 1; + return count != 0 && values[0]; } @Override public Boolean get(int index) { - return values.valueAt(index) == 1; + return values[index]; } @Override public int size() { - return values.count(); + return count; + } + + private static boolean[] grow(boolean[] array, int minSize) { + assert minSize >= 0 : "size must be positive (got " + minSize + + "): likely integer overflow?"; + if (array.length < minSize) { + return Arrays.copyOf(array, ArrayUtil.oversize(minSize, 1)); + } else + return array; } } public static final class BytesRefs extends ScriptDocValues { - private final SortedBinaryDocValues values; + private final SortedBinaryDocValues in; + private BytesRef[] values; + private int count; - public BytesRefs(SortedBinaryDocValues values) { - this.values = values; + public BytesRefs(SortedBinaryDocValues in) { + this.in = in; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); + public void setNextDocId(int docId) throws IOException { + if (in.advanceExact(docId)) { + resize(in.docValueCount()); + for (int i = 0; i < count; i++) { + values[i] = in.nextValue(); + } + } else { + resize(0); + } + } + + /** + * Set the {@link #size()} and ensure that the {@link #values} array can + * store at least that many entries. 
+ */ + protected void resize(int newSize) { + count = newSize; + if (values == null) { + values = new BytesRef[newSize]; + } else { + values = ArrayUtil.grow(values, count); + } } public SortedBinaryDocValues getInternalValues() { - return this.values; + return this.in; } public BytesRef getValue() { - int numValues = values.count(); - if (numValues == 0) { + if (count == 0) { return new BytesRef(); } - return values.valueAt(0); + return values[0]; } @Override public BytesRef get(int index) { - return values.valueAt(index); + return values[index]; } @Override public int size() { - return values.count(); + return count; } } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SingletonMultiGeoPointValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SingletonMultiGeoPointValues.java index 883444fabc7..bae522e7b50 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SingletonMultiGeoPointValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SingletonMultiGeoPointValues.java @@ -19,48 +19,34 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.util.Bits; import org.elasticsearch.common.geo.GeoPoint; +import java.io.IOException; + final class SingletonMultiGeoPointValues extends MultiGeoPointValues { private final GeoPointValues in; - private final Bits docsWithField; - private GeoPoint value; - private int count; - SingletonMultiGeoPointValues(GeoPointValues in, Bits docsWithField) { + SingletonMultiGeoPointValues(GeoPointValues in) { this.in = in; - this.docsWithField = docsWithField; } @Override - public void setDocument(int docID) { - value = in.get(docID); - if (value.lat() == Double.NaN && value.lon() == Double.NaN || (docsWithField != null && !docsWithField.get(docID))) { - count = 0; - } else { - count = 1; - } + public boolean advanceExact(int doc) throws IOException { + return in.advanceExact(doc); } @Override - public int count() { - return count; + public int docValueCount() { + 
return 1; } @Override - public GeoPoint valueAt(int index) { - assert index == 0; - return value; + public GeoPoint nextValue() { + return in.geoPointValue(); } - public GeoPointValues getGeoPointValues() { + GeoPointValues getGeoPointValues() { return in; } - - public Bits getDocsWithField() { - return docsWithField; - } - } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedBinaryDocValues.java index 1a35f056f9f..01f110a7b38 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedBinaryDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedBinaryDocValues.java @@ -20,49 +20,35 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.util.Bits; -import org.apache.lucene.util.Bits.MatchAllBits; import org.apache.lucene.util.BytesRef; +import java.io.IOException; + final class SingletonSortedBinaryDocValues extends SortedBinaryDocValues { private final BinaryDocValues in; - private final Bits docsWithField; - private BytesRef value; - private int count; - SingletonSortedBinaryDocValues(BinaryDocValues in, Bits docsWithField) { + SingletonSortedBinaryDocValues(BinaryDocValues in) { this.in = in; - this.docsWithField = docsWithField instanceof MatchAllBits ? 
null : docsWithField; } @Override - public void setDocument(int docID) { - value = in.get(docID); - if (value.length == 0 && docsWithField != null && !docsWithField.get(docID)) { - count = 0; - } else { - count = 1; - } + public boolean advanceExact(int doc) throws IOException { + return in.advanceExact(doc); } @Override - public int count() { - return count; + public int docValueCount() { + return 1; } @Override - public BytesRef valueAt(int index) { - assert index == 0; - return value; + public BytesRef nextValue() throws IOException { + return in.binaryValue(); } public BinaryDocValues getBinaryDocValues() { return in; } - public Bits getDocsWithField() { - return docsWithField; - } - } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedNumericDoubleValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedNumericDoubleValues.java index 4207ac73a1a..af0768ac2c3 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedNumericDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SingletonSortedNumericDoubleValues.java @@ -19,8 +19,7 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.util.Bits; -import org.apache.lucene.util.Bits.MatchAllBits; +import java.io.IOException; /** * Exposes multi-valued view over a single-valued instance. @@ -29,43 +28,30 @@ import org.apache.lucene.util.Bits.MatchAllBits; * that works for single or multi-valued types. */ final class SingletonSortedNumericDoubleValues extends SortedNumericDoubleValues { - private final NumericDoubleValues in; - private final Bits docsWithField; - private double value; - private int count; + private final NumericDoubleValues in; - SingletonSortedNumericDoubleValues(NumericDoubleValues in, Bits docsWithField) { - this.in = in; - this.docsWithField = docsWithField instanceof MatchAllBits ? 
null : docsWithField; - } - - /** Return the wrapped {@link NumericDoubleValues} */ - public NumericDoubleValues getNumericDoubleValues() { - return in; - } - - /** Return the wrapped {@link Bits} */ - public Bits getDocsWithField() { - return docsWithField; - } - - @Override - public void setDocument(int doc) { - value = in.get(doc); - if (docsWithField != null && value == 0 && docsWithField.get(doc) == false) { - count = 0; - } else { - count = 1; + SingletonSortedNumericDoubleValues(NumericDoubleValues in) { + this.in = in; } - } - @Override - public double valueAt(int index) { - return value; - } + /** Return the wrapped {@link NumericDoubleValues} */ + public NumericDoubleValues getNumericDoubleValues() { + return in; + } + + @Override + public boolean advanceExact(int target) throws IOException { + return in.advanceExact(target); + } + + @Override + public int docValueCount() { + return 1; + } + + @Override + public double nextValue() throws IOException { + return in.doubleValue(); + } - @Override - public int count() { - return count; - } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsNumericDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsNumericDocValues.java index 678b4206133..259070cfc92 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsNumericDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsNumericDocValues.java @@ -22,13 +22,16 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.util.NumericUtils; +import java.io.IOException; + /** * {@link NumericDocValues} instance that wraps a {@link NumericDoubleValues} * and converts the doubles to sortable long bits using * {@link NumericUtils#doubleToSortableLong(double)}. 
*/ -final class SortableLongBitsNumericDocValues extends NumericDocValues { +final class SortableLongBitsNumericDocValues extends AbstractNumericDocValues { + private int docID = -1; private final NumericDoubleValues values; SortableLongBitsNumericDocValues(NumericDoubleValues values) { @@ -36,8 +39,19 @@ final class SortableLongBitsNumericDocValues extends NumericDocValues { } @Override - public long get(int docID) { - return NumericUtils.doubleToSortableLong(values.get(docID)); + public long longValue() throws IOException { + return NumericUtils.doubleToSortableLong(values.doubleValue()); + } + + @Override + public boolean advanceExact(int target) throws IOException { + docID = target; + return values.advanceExact(target); + } + + @Override + public int docID() { + return docID; } /** Return the wrapped values. */ diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsSortedNumericDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsSortedNumericDocValues.java index 9dbf44f14f2..81d33bc432f 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsSortedNumericDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsSortedNumericDocValues.java @@ -22,12 +22,14 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.NumericUtils; +import java.io.IOException; + /** * {@link SortedNumericDocValues} instance that wraps a {@link SortedNumericDoubleValues} * and converts the doubles to sortable long bits using * {@link NumericUtils#doubleToSortableLong(double)}. 
*/ -final class SortableLongBitsSortedNumericDocValues extends SortedNumericDocValues { +final class SortableLongBitsSortedNumericDocValues extends AbstractSortedNumericDocValues { private final SortedNumericDoubleValues values; @@ -36,18 +38,18 @@ final class SortableLongBitsSortedNumericDocValues extends SortedNumericDocValue } @Override - public void setDocument(int doc) { - values.setDocument(doc); + public boolean advanceExact(int target) throws IOException { + return values.advanceExact(target); } @Override - public long valueAt(int index) { - return NumericUtils.doubleToSortableLong(values.valueAt(index)); + public long nextValue() throws IOException { + return NumericUtils.doubleToSortableLong(values.nextValue()); } @Override - public int count() { - return values.count(); + public int docValueCount() { + return values.docValueCount(); } /** Return the wrapped values. */ diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java index 7c150cf319b..94bc7168dca 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToNumericDoubleValues.java @@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.NumericDocValues; import org.apache.lucene.util.NumericUtils; +import java.io.IOException; + /** * {@link NumericDoubleValues} instance that wraps a {@link NumericDocValues} * and converts the doubles to sortable long bits using @@ -36,8 +38,13 @@ final class SortableLongBitsToNumericDoubleValues extends NumericDoubleValues { } @Override - public double get(int docID) { - return NumericUtils.sortableLongToDouble(values.get(docID)); + public double doubleValue() throws IOException { + return NumericUtils.sortableLongToDouble(values.longValue()); + } + + 
@Override + public boolean advanceExact(int doc) throws IOException { + return values.advanceExact(doc); } /** Return the wrapped values. */ diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java index 2669463170b..d78b66114b3 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortableLongBitsToSortedNumericDoubleValues.java @@ -22,6 +22,8 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.NumericUtils; +import java.io.IOException; + /** * {@link SortedNumericDoubleValues} instance that wraps a {@link SortedNumericDocValues} * and converts the doubles to sortable long bits using @@ -36,18 +38,18 @@ final class SortableLongBitsToSortedNumericDoubleValues extends SortedNumericDou } @Override - public void setDocument(int doc) { - values.setDocument(doc); + public boolean advanceExact(int target) throws IOException { + return values.advanceExact(target); } @Override - public double valueAt(int index) { - return NumericUtils.sortableLongToDouble(values.valueAt(index)); + public double nextValue() throws IOException { + return NumericUtils.sortableLongToDouble(values.nextValue()); } @Override - public int count() { - return values.count(); + public int docValueCount() { + return values.docValueCount(); } /** Return the wrapped values. 
*/ diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java index b3c51141e20..119fda1bda8 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortedBinaryDocValues.java @@ -21,28 +21,35 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.util.BytesRef; +import java.io.IOException; + /** * A list of per-document binary values, sorted * according to {@link BytesRef#compareTo(BytesRef)}. * There might be dups however. */ +// TODO: Should it expose a count (current approach) or return null when there are no more values? public abstract class SortedBinaryDocValues { /** - * Positions to the specified document + * Advance this instance to the given document id + * @return true if there is a value for this document */ - public abstract void setDocument(int docId); + public abstract boolean advanceExact(int doc) throws IOException; - /** - * Return the number of values of the current document. + /** + * Retrieves the number of values for the current document. This must always + * be greater than zero. + * It is illegal to call this method after {@link #advanceExact(int)} + * returned {@code false}. */ - public abstract int count(); + public abstract int docValueCount(); - /** - * Retrieve the value for the current document at the specified index. - * An index ranges from {@code 0} to {@code count()-1}. + /** + * Iterates to the next value in the current document. Do not call this more than + * {@link #docValueCount} times for the document. * Note that the returned {@link BytesRef} might be reused across invocations. 
*/ - public abstract BytesRef valueAt(int index); + public abstract BytesRef nextValue() throws IOException; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortedNumericDoubleValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortedNumericDoubleValues.java index 23cdfa7c0d1..d0d9fc4b4c7 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortedNumericDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortedNumericDoubleValues.java @@ -21,6 +21,8 @@ package org.elasticsearch.index.fielddata; import org.apache.lucene.index.SortedNumericDocValues; +import java.io.IOException; + /** * Clone of {@link SortedNumericDocValues} for double values. */ @@ -30,21 +32,25 @@ public abstract class SortedNumericDoubleValues { * constructors, typically implicit.) */ protected SortedNumericDoubleValues() {} - /** - * Positions to the specified document - */ - public abstract void setDocument(int doc); + /** Advance the iterator to exactly {@code target} and return whether + * {@code target} has a value. + * {@code target} must be greater than or equal to the current + * doc ID and must be a valid doc ID, ie. ≥ 0 and + * < {@code maxDoc}.*/ + public abstract boolean advanceExact(int target) throws IOException; - /** - * Retrieve the value for the current document at the specified index. - * An index ranges from {@code 0} to {@code count()-1}. + /** + * Iterates to the next value in the current document. Do not call this more than + * {@link #docValueCount} times for the document. */ - public abstract double valueAt(int index); - - /** - * Retrieves the count of values for the current document. - * This may be zero if a document has no values. + public abstract double nextValue() throws IOException; + + /** + * Retrieves the number of values for the current document. This must always + * be greater than zero. + * It is illegal to call this method after {@link #advanceExact(int)} + * returned {@code false}. 
*/ - public abstract int count(); + public abstract int docValueCount(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java index c35d59fc692..99f0d477254 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortingBinaryDocValues.java @@ -33,6 +33,7 @@ import java.util.Arrays; */ public abstract class SortingBinaryDocValues extends SortedBinaryDocValues { + private int index; protected int count; protected BytesRefBuilder[] values; private final Sorter sorter; @@ -73,15 +74,17 @@ public abstract class SortingBinaryDocValues extends SortedBinaryDocValues { */ protected final void sort() { sorter.sort(0, count); + index = 0; } @Override - public final int count() { + public int docValueCount() { return count; } @Override - public final BytesRef valueAt(int index) { - return values[index].get(); + public final BytesRef nextValue() { + assert index < count; + return values[index++].get(); } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDocValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDocValues.java index 7c1d1fff2ae..0049faaf2b5 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDocValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDocValues.java @@ -31,10 +31,12 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues { private int count; protected long[] values; + protected int valuesCursor; private final Sorter sorter; protected SortingNumericDocValues() { values = new long[1]; + valuesCursor = 0; sorter = new InPlaceMergeSorter() { @Override @@ -52,12 +54,13 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues { } /** - * Set the {@link #count()} and ensure 
that the {@link #values} array can + * Set the {@link #docValueCount()} and ensure that the {@link #values} array can * store at least that many entries. */ protected final void resize(int newSize) { count = newSize; values = ArrayUtil.grow(values, count); + valuesCursor = 0; } /** @@ -69,12 +72,12 @@ public abstract class SortingNumericDocValues extends SortedNumericDocValues { } @Override - public final int count() { + public final int docValueCount() { return count; } @Override - public final long valueAt(int index) { - return values[index]; + public final long nextValue() { + return values[valuesCursor++]; } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDoubleValues.java b/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDoubleValues.java index 674a86719ec..7a39f647456 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/SortingNumericDoubleValues.java @@ -29,11 +29,13 @@ import org.apache.lucene.util.Sorter; public abstract class SortingNumericDoubleValues extends SortedNumericDoubleValues { private int count; + private int valuesCursor; protected double[] values; private final Sorter sorter; protected SortingNumericDoubleValues() { values = new double[1]; + valuesCursor = 0; sorter = new InPlaceMergeSorter() { @Override @@ -51,29 +53,30 @@ public abstract class SortingNumericDoubleValues extends SortedNumericDoubleValu } /** - * Set the {@link #count()} and ensure that the {@link #values} array can + * Set the {@link #docValueCount()} and ensure that the {@link #values} array can * store at least that many entries. */ protected final void resize(int newSize) { count = newSize; values = ArrayUtil.grow(values, count); + valuesCursor = 0; } /** * Sort values that are stored between offsets 0 and - * {@link #count} of {@link #values}. + * {@link #docValueCount} of {@link #values}. 
*/ protected final void sort() { sorter.sort(0, count); } @Override - public final int count() { + public final int docValueCount() { return count; } @Override - public final double valueAt(int index) { - return values[index]; + public final double nextValue() { + return values[valuesCursor++]; } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 48b6a1127a5..c433836417a 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -21,15 +21,15 @@ package org.elasticsearch.index.fielddata.fieldcomparator; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.index.fielddata.AbstractSortedDocValues; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -89,7 +89,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat @Override protected SortedDocValues getSortedDocValues(LeafReaderContext context, String field) throws IOException { - final RandomAccessOrds values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues(); + final 
SortedSetDocValues values = ((IndexOrdinalsFieldData) indexFieldData).load(context).getOrdinalsValues(); final SortedDocValues selectedValues; if (nested == null) { selectedValues = sortMode.select(values); @@ -113,8 +113,6 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat }; } - final BytesRef nullPlaceHolder = new BytesRef(); - final BytesRef nonNullMissingBytes = missingBytes == null ? nullPlaceHolder : missingBytes; return new FieldComparator.TermValComparator(numHits, null, sortMissingLast) { @Override @@ -122,25 +120,15 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat final SortedBinaryDocValues values = getValues(context); final BinaryDocValues selectedValues; if (nested == null) { - selectedValues = sortMode.select(values, nonNullMissingBytes); + selectedValues = sortMode.select(values, missingBytes); } else { final BitSet rootDocs = nested.rootDocs(context); final DocIdSetIterator innerDocs = nested.innerDocs(context); - selectedValues = sortMode.select(values, nonNullMissingBytes, rootDocs, innerDocs, context.reader().maxDoc()); + selectedValues = sortMode.select(values, missingBytes, rootDocs, innerDocs, context.reader().maxDoc()); } return selectedValues; } - @Override - protected Bits getDocsWithField(LeafReaderContext context, String field) throws IOException { - return new Bits.MatchAllBits(context.reader().maxDoc()); - } - - @Override - protected boolean isNull(int doc, BytesRef term) { - return term == nullPlaceHolder; - } - @Override public void setScorer(Scorer scorer) { BytesRefFieldComparatorSource.this.setScorer(scorer); @@ -154,13 +142,14 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat * are replaced with the specified term */ // TODO: move this out if we need it for other reasons - static class ReplaceMissing extends SortedDocValues { + static class ReplaceMissing extends AbstractSortedDocValues { final SortedDocValues in; final int 
substituteOrd; final BytesRef substituteTerm; final boolean exists; + boolean hasValue; - ReplaceMissing(SortedDocValues in, BytesRef term) { + ReplaceMissing(SortedDocValues in, BytesRef term) throws IOException { this.in = in; this.substituteTerm = term; int sub = in.lookupTerm(term); @@ -174,17 +163,29 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat } @Override - public int getOrd(int docID) { - int ord = in.getOrd(docID); - if (ord < 0) { + public int ordValue() throws IOException { + if (hasValue == false) { return substituteOrd; - } else if (exists == false && ord >= substituteOrd) { + } + int ord = in.ordValue(); + if (exists == false && ord >= substituteOrd) { return ord + 1; } else { return ord; } } + @Override + public boolean advanceExact(int target) throws IOException { + hasValue = in.advanceExact(target); + return true; + } + + @Override + public int docID() { + return in.docID(); + } + @Override public int getValueCount() { if (exists) { @@ -195,7 +196,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat } @Override - public BytesRef lookupOrd(int ord) { + public BytesRef lookupOrd(int ord) throws IOException { if (ord == substituteOrd) { return substituteTerm; } else if (exists == false && ord > substituteOrd) { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java index 293ddf430df..3eda112674d 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalMapping.java @@ -20,22 +20,23 @@ package org.elasticsearch.index.fielddata.ordinals; import org.apache.lucene.index.MultiDocValues.OrdinalMap; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import 
org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongValues; -import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds; + +import java.io.IOException; /** - * A {@link RandomAccessOrds} implementation that returns ordinals that are global. + * A {@link SortedSetDocValues} implementation that returns ordinals that are global. */ -public class GlobalOrdinalMapping extends AbstractRandomAccessOrds { +public class GlobalOrdinalMapping extends SortedSetDocValues { - private final RandomAccessOrds values; + private final SortedSetDocValues values; private final OrdinalMap ordinalMap; private final LongValues mapping; - private final RandomAccessOrds[] bytesValues; + private final SortedSetDocValues[] bytesValues; - GlobalOrdinalMapping(OrdinalMap ordinalMap, RandomAccessOrds[] bytesValues, int segmentIndex) { + GlobalOrdinalMapping(OrdinalMap ordinalMap, SortedSetDocValues[] bytesValues, int segmentIndex) { super(); this.values = bytesValues[segmentIndex]; this.bytesValues = bytesValues; @@ -53,25 +54,45 @@ public class GlobalOrdinalMapping extends AbstractRandomAccessOrds { } @Override - public long ordAt(int index) { - return getGlobalOrd(values.ordAt(index)); + public boolean advanceExact(int target) throws IOException { + return values.advanceExact(target); } @Override - public void doSetDocument(int docId) { - values.setDocument(docId); + public long nextOrd() throws IOException { + long segmentOrd = values.nextOrd(); + if (segmentOrd == SortedSetDocValues.NO_MORE_ORDS) { + return SortedSetDocValues.NO_MORE_ORDS; + } else { + return getGlobalOrd(segmentOrd); + } } @Override - public int cardinality() { - return values.cardinality(); - } - - @Override - public BytesRef lookupOrd(long globalOrd) { + public BytesRef lookupOrd(long globalOrd) throws IOException { final long segmentOrd = ordinalMap.getFirstSegmentOrd(globalOrd); int readerIndex = ordinalMap.getFirstSegmentNumber(globalOrd); return bytesValues[readerIndex].lookupOrd(segmentOrd); } + 
@Override + public int docID() { + return values.docID(); + } + + @Override + public int nextDoc() throws IOException { + return values.nextDoc(); + } + + @Override + public int advance(int target) throws IOException { + return values.advance(target); + } + + @Override + public long cost() { + return values.cost(); + } + } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java index 7b3b0646891..49140968ca7 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/GlobalOrdinalsBuilder.java @@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.MultiDocValues.OrdinalMap; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.common.breaker.CircuitBreaker; @@ -52,12 +52,12 @@ public enum GlobalOrdinalsBuilder { */ public static IndexOrdinalsFieldData build(final IndexReader indexReader, IndexOrdinalsFieldData indexFieldData, IndexSettings indexSettings, CircuitBreakerService breakerService, Logger logger, - Function> scriptFunction) throws IOException { + Function> scriptFunction) throws IOException { assert indexReader.leaves().size() > 1; long startTimeNS = System.nanoTime(); final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()]; - final RandomAccessOrds[] subs = new RandomAccessOrds[indexReader.leaves().size()]; + final SortedSetDocValues[] subs = new SortedSetDocValues[indexReader.leaves().size()]; for (int i = 0; i < indexReader.leaves().size(); ++i) { atomicFD[i] = 
indexFieldData.load(indexReader.leaves().get(i)); subs[i] = atomicFD[i].getOrdinalsValues(); @@ -83,11 +83,11 @@ public enum GlobalOrdinalsBuilder { assert indexReader.leaves().size() > 1; final AtomicOrdinalsFieldData[] atomicFD = new AtomicOrdinalsFieldData[indexReader.leaves().size()]; - final RandomAccessOrds[] subs = new RandomAccessOrds[indexReader.leaves().size()]; + final SortedSetDocValues[] subs = new SortedSetDocValues[indexReader.leaves().size()]; for (int i = 0; i < indexReader.leaves().size(); ++i) { atomicFD[i] = new AbstractAtomicOrdinalsFieldData(AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION) { @Override - public RandomAccessOrds getOrdinalsValues() { + public SortedSetDocValues getOrdinalsValues() { return DocValues.emptySortedSet(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java index 284e22b21d4..23ecc06fed6 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/InternalGlobalOrdinalsIndexFieldData.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.fielddata.ordinals; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.MultiDocValues.OrdinalMap; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; @@ -36,10 +36,10 @@ import java.util.function.Function; final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFieldData { private final Atomic[] atomicReaders; - private final Function> scriptFunction; + private final Function> scriptFunction; 
InternalGlobalOrdinalsIndexFieldData(IndexSettings indexSettings, String fieldName, AtomicOrdinalsFieldData[] segmentAfd, - OrdinalMap ordinalMap, long memorySizeInBytes, Function> scriptFunction) { + OrdinalMap ordinalMap, long memorySizeInBytes, Function> scriptFunction) { super(indexSettings, fieldName, memorySizeInBytes); this.atomicReaders = new Atomic[segmentAfd.length]; for (int i = 0; i < segmentAfd.length; i++) { @@ -67,13 +67,13 @@ final class InternalGlobalOrdinalsIndexFieldData extends GlobalOrdinalsIndexFiel } @Override - public RandomAccessOrds getOrdinalsValues() { - final RandomAccessOrds values = afd.getOrdinalsValues(); + public SortedSetDocValues getOrdinalsValues() { + final SortedSetDocValues values = afd.getOrdinalsValues(); if (values.getValueCount() == ordinalMap.getValueCount()) { // segment ordinals match global ordinals return values; } - final RandomAccessOrds[] bytesValues = new RandomAccessOrds[atomicReaders.length]; + final SortedSetDocValues[] bytesValues = new SortedSetDocValues[atomicReaders.length]; for (int i = 0; i < bytesValues.length; i++) { bytesValues[i] = atomicReaders[i].afd.getOrdinalsValues(); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java index d85073a0688..86e2787658c 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinals.java @@ -20,16 +20,17 @@ package org.elasticsearch.index.fielddata.ordinals; import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.RandomAccessOrds; -import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongsRef; import 
org.apache.lucene.util.packed.PackedInts; import org.apache.lucene.util.packed.PackedLongValues; -import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds; +import org.elasticsearch.index.fielddata.AbstractSortedDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; +import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -100,21 +101,25 @@ public class MultiOrdinals extends Ordinals { } @Override - public RandomAccessOrds ordinals(ValuesHolder values) { + public SortedSetDocValues ordinals(ValuesHolder values) { if (multiValued) { return new MultiDocs(this, values); } else { - return (RandomAccessOrds) DocValues.singleton(new SingleDocs(this, values)); + return (SortedSetDocValues) DocValues.singleton(new SingleDocs(this, values)); } } - private static class SingleDocs extends SortedDocValues { + private static class SingleDocs extends AbstractSortedDocValues { private final int valueCount; private final PackedLongValues endOffsets; private final PackedLongValues ords; private final ValuesHolder values; + private int currentDoc = -1; + private long currentStartOffset; + private long currentEndOffset; + SingleDocs(MultiOrdinals ordinals, ValuesHolder values) { this.valueCount = (int) ordinals.valueCount; this.endOffsets = ordinals.endOffsets; @@ -123,10 +128,21 @@ public class MultiOrdinals extends Ordinals { } @Override - public int getOrd(int docId) { - final long startOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; - final long endOffset = endOffsets.get(docId); - return startOffset == endOffset ? -1 : (int) ords.get(startOffset); + public int ordValue() { + return (int) ords.get(currentStartOffset); + } + + @Override + public boolean advanceExact(int docId) throws IOException { + currentDoc = docId; + currentStartOffset = docId != 0 ? 
endOffsets.get(docId - 1) : 0; + currentEndOffset = endOffsets.get(docId); + return currentStartOffset != currentEndOffset; + } + + @Override + public int docID() { + return currentDoc; } @Override @@ -141,15 +157,16 @@ public class MultiOrdinals extends Ordinals { } - private static class MultiDocs extends AbstractRandomAccessOrds { + private static class MultiDocs extends AbstractSortedSetDocValues { private final long valueCount; private final PackedLongValues endOffsets; private final PackedLongValues ords; - private long offset; - private int cardinality; private final ValuesHolder values; + private long currentOffset; + private long currentEndOffset; + MultiDocs(MultiOrdinals ordinals, ValuesHolder values) { this.valueCount = ordinals.valueCount; this.endOffsets = ordinals.endOffsets; @@ -163,21 +180,19 @@ public class MultiOrdinals extends Ordinals { } @Override - public void doSetDocument(int docId) { - final long startOffset = docId != 0 ? endOffsets.get(docId - 1) : 0; - final long endOffset = endOffsets.get(docId); - offset = startOffset; - cardinality = (int) (endOffset - startOffset); + public boolean advanceExact(int docId) throws IOException { + currentOffset = docId != 0 ? 
endOffsets.get(docId - 1) : 0; + currentEndOffset = endOffsets.get(docId); + return currentOffset != currentEndOffset; } @Override - public int cardinality() { - return cardinality; - } - - @Override - public long ordAt(int index) { - return ords.get(offset + index); + public long nextOrd() throws IOException { + if (currentOffset == currentEndOffset) { + return SortedSetDocValues.NO_MORE_ORDS; + } else { + return ords.get(currentOffset++); + } } @Override diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java index 77c0fd6bf5b..7e59b708236 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/Ordinals.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.fielddata.ordinals; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.BytesRef; @@ -41,9 +41,9 @@ public abstract class Ordinals implements Accountable { @Override public abstract long ramBytesUsed(); - public abstract RandomAccessOrds ordinals(ValuesHolder values); + public abstract SortedSetDocValues ordinals(ValuesHolder values); - public final RandomAccessOrds ordinals() { + public final SortedSetDocValues ordinals() { return ordinals(NO_VALUES); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java index 967d07174b9..b2e1b9e3104 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/OrdinalsBuilder.java @@ -19,17 +19,14 @@ package org.elasticsearch.index.fielddata.ordinals; -import org.apache.lucene.index.FilteredTermsEnum; import 
org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.FixedBitSet; -import org.apache.lucene.util.LegacyNumericUtils; import org.apache.lucene.util.LongsRef; import org.apache.lucene.util.packed.GrowableWriter; import org.apache.lucene.util.packed.PackedInts; @@ -414,23 +411,6 @@ public final class OrdinalsBuilder implements Closeable { } } - /** - * A {@link TermsEnum} that iterates only highest resolution geo prefix coded terms. - * - * @see #buildFromTerms(TermsEnum) - */ - public static TermsEnum wrapGeoPointTerms(TermsEnum termsEnum) { - return new FilteredTermsEnum(termsEnum, false) { - @Override - protected AcceptStatus accept(BytesRef term) throws IOException { - // accept only the max resolution terms - // todo is this necessary? - return GeoPointField.getPrefixCodedShift(term) == GeoPointField.PRECISION_STEP * 4 ? - AcceptStatus.YES : AcceptStatus.END; - } - }; - } - /** * Returns the maximum document ID this builder can associate with an ordinal @@ -439,51 +419,12 @@ public final class OrdinalsBuilder implements Closeable { return maxDoc; } - /** - * A {@link TermsEnum} that iterates only full precision prefix coded 64 bit values. - * - * @see #buildFromTerms(TermsEnum) - */ - public static TermsEnum wrapNumeric64Bit(TermsEnum termsEnum) { - return new FilteredTermsEnum(termsEnum, false) { - @Override - protected AcceptStatus accept(BytesRef term) throws IOException { - // we stop accepting terms once we moved across the prefix codec terms - redundant values! - return LegacyNumericUtils.getPrefixCodedLongShift(term) == 0 ? 
AcceptStatus.YES : AcceptStatus.END; - } - }; - } - - /** - * A {@link TermsEnum} that iterates only full precision prefix coded 32 bit values. - * - * @see #buildFromTerms(TermsEnum) - */ - public static TermsEnum wrapNumeric32Bit(TermsEnum termsEnum) { - return new FilteredTermsEnum(termsEnum, false) { - - @Override - protected AcceptStatus accept(BytesRef term) throws IOException { - // we stop accepting terms once we moved across the prefix codec terms - redundant values! - return LegacyNumericUtils.getPrefixCodedIntShift(term) == 0 ? AcceptStatus.YES : AcceptStatus.END; - } - }; - } - /** * This method iterates all terms in the given {@link TermsEnum} and * associates each terms ordinal with the terms documents. The caller must * exhaust the returned {@link BytesRefIterator} which returns all values * where the first returned value is associated with the ordinal 1 * etc. - *

- * If the {@link TermsEnum} contains prefix coded numerical values the terms - * enum should be wrapped with either {@link #wrapNumeric32Bit(TermsEnum)} - * or {@link #wrapNumeric64Bit(TermsEnum)} depending on its precision. If - * the {@link TermsEnum} is not wrapped the returned - * {@link BytesRefIterator} will contain partial precision terms rather than - * only full-precision terms. - *

*/ public BytesRefIterator buildFromTerms(final TermsEnum termsEnum) throws IOException { return new BytesRefIterator() { diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java index f7db724c356..27f0aadee87 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/ordinals/SinglePackedOrdinals.java @@ -20,14 +20,15 @@ package org.elasticsearch.index.fielddata.ordinals; import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.RandomAccessOrds; -import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.RamUsageEstimator; import org.apache.lucene.util.packed.PackedInts; +import org.elasticsearch.index.fielddata.AbstractSortedDocValues; +import java.io.IOException; import java.util.Collection; import java.util.Collections; @@ -57,16 +58,19 @@ public class SinglePackedOrdinals extends Ordinals { } @Override - public RandomAccessOrds ordinals(ValuesHolder values) { - return (RandomAccessOrds) DocValues.singleton(new Docs(this, values)); + public SortedSetDocValues ordinals(ValuesHolder values) { + return (SortedSetDocValues) DocValues.singleton(new Docs(this, values)); } - private static class Docs extends SortedDocValues { + private static class Docs extends AbstractSortedDocValues { private final int maxOrd; private final PackedInts.Reader reader; private final ValuesHolder values; + private int currentDoc = -1; + private int currentOrd; + Docs(SinglePackedOrdinals parent, ValuesHolder values) { this.maxOrd = parent.valueCount; this.reader = parent.reader; @@ -84,8 +88,20 @@ public class SinglePackedOrdinals extends 
Ordinals { } @Override - public int getOrd(int docID) { - return (int) (reader.get(docID) - 1); + public int ordValue() { + return currentOrd; + } + + @Override + public boolean advanceExact(int docID) throws IOException { + currentDoc = docID; + currentOrd = (int) (reader.get(docID) - 1); + return currentOrd != -1; + } + + @Override + public int docID() { + return currentDoc; } } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicGeoPointFieldData.java index 9660d9f8684..5d6575e4378 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicGeoPointFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicGeoPointFieldData.java @@ -59,7 +59,7 @@ public abstract class AbstractAtomicGeoPointFieldData implements AtomicGeoPointF @Override public MultiGeoPointValues getGeoPointValues() { - return FieldData.emptyMultiGeoPoints(maxDoc); + return FieldData.emptyMultiGeoPoints(); } }; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicOrdinalsFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicOrdinalsFieldData.java index 7959bf3578a..9130e19fb26 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicOrdinalsFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicOrdinalsFieldData.java @@ -20,7 +20,7 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.FieldData; @@ -34,13 +34,13 @@ import java.util.function.Function; public abstract class 
AbstractAtomicOrdinalsFieldData implements AtomicOrdinalsFieldData { - public static final Function> DEFAULT_SCRIPT_FUNCTION = - ((Function) FieldData::toString) + public static final Function> DEFAULT_SCRIPT_FUNCTION = + ((Function) FieldData::toString) .andThen(ScriptDocValues.Strings::new); - private final Function> scriptFunction; + private final Function> scriptFunction; - protected AbstractAtomicOrdinalsFieldData(Function> scriptFunction) { + protected AbstractAtomicOrdinalsFieldData(Function> scriptFunction) { this.scriptFunction = scriptFunction; } @@ -72,7 +72,7 @@ public abstract class AbstractAtomicOrdinalsFieldData implements AtomicOrdinalsF } @Override - public RandomAccessOrds getOrdinalsValues() { + public SortedSetDocValues getOrdinalsValues() { return DocValues.emptySortedSet(); } }; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicParentChildFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicParentChildFieldData.java index 7c03e1a7942..2df5aa6bb63 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicParentChildFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractAtomicParentChildFieldData.java @@ -28,6 +28,7 @@ import org.elasticsearch.index.fielddata.AtomicParentChildFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; +import java.io.IOException; import java.util.Collection; import java.util.Collections; import java.util.Set; @@ -46,16 +47,28 @@ abstract class AbstractAtomicParentChildFieldData implements AtomicParentChildFi public final SortedBinaryDocValues getBytesValues() { return new SortedBinaryDocValues() { + private final SortedDocValues[] perTypeValues; private final BytesRef[] terms = new BytesRef[2]; private int count; + private int termsCursor; + + { + Set types = types(); + perTypeValues = new 
SortedDocValues[types.size()]; + int i = 0; + for (String type : types) { + perTypeValues[i++] = getOrdinalsValues(type); + } + } @Override - public void setDocument(int docId) { + public boolean advanceExact(int docId) throws IOException { count = 0; - for (String type : types()) { - final SortedDocValues values = getOrdinalsValues(type); - final int ord = values.getOrd(docId); - if (ord >= 0) { + termsCursor = 0; + + for (SortedDocValues values : perTypeValues) { + if (values.advanceExact(docId)) { + final int ord = values.ordValue(); terms[count++] = values.lookupOrd(ord); } } @@ -69,16 +82,17 @@ abstract class AbstractAtomicParentChildFieldData implements AtomicParentChildFi count = 1; } } + return count != 0; } @Override - public int count() { + public int docValueCount() { return count; } @Override - public BytesRef valueAt(int index) { - return terms[index]; + public BytesRef nextValue() throws IOException { + return terms[termsCursor++]; } }; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java deleted file mode 100644 index 8db38e59ce0..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractGeoPointDVIndexFieldData.java +++ /dev/null @@ -1,83 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.SortField; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.MapperService; -import org.elasticsearch.indices.breaker.CircuitBreakerService; -import org.elasticsearch.search.MultiValueMode; - -import java.io.IOException; - -public abstract class AbstractGeoPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData { - - AbstractGeoPointDVIndexFieldData(Index index, String fieldName) { - super(index, fieldName); - } - - @Override - public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { - throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance"); - } - - /** - * Lucene 5.4 GeoPointFieldType - */ - public static class GeoPointDVIndexFieldData extends AbstractGeoPointDVIndexFieldData { - - public 
GeoPointDVIndexFieldData(Index index, String fieldName) { - super(index, fieldName); - } - - @Override - public AtomicGeoPointFieldData load(LeafReaderContext context) { - try { - return new GeoPointDVAtomicFieldData(DocValues.getSortedNumeric(context.reader(), fieldName)); - } catch (IOException e) { - throw new IllegalStateException("Cannot load doc values", e); - } - } - - @Override - public AtomicGeoPointFieldData loadDirect(LeafReaderContext context) throws Exception { - return load(context); - } - } - - public static class Builder implements IndexFieldData.Builder { - @Override - public IndexFieldData build(IndexSettings indexSettings, MappedFieldType fieldType, IndexFieldDataCache cache, - CircuitBreakerService breakerService, MapperService mapperService) { - // Ignore breaker - return new GeoPointDVIndexFieldData(indexSettings.getIndex(), fieldType.name()); - } - } -} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java deleted file mode 100644 index bdf1bbac332..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractIndexGeoPointFieldData.java +++ /dev/null @@ -1,117 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.search.SortField; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; -import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefIterator; -import org.apache.lucene.util.CharsRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.fielddata.AtomicGeoPointFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; -import org.elasticsearch.index.fielddata.IndexFieldDataCache; -import org.elasticsearch.index.fielddata.IndexGeoPointFieldData; -import org.elasticsearch.search.MultiValueMode; - -import java.io.IOException; - -abstract class AbstractIndexGeoPointFieldData extends AbstractIndexFieldData implements IndexGeoPointFieldData { - protected abstract static class BaseGeoPointTermsEnum { - protected final BytesRefIterator termsEnum; - - protected BaseGeoPointTermsEnum(BytesRefIterator termsEnum) { - this.termsEnum = termsEnum; - } - } - - protected static class GeoPointTermsEnum extends BaseGeoPointTermsEnum { - private final GeoPointField.TermEncoding termEncoding; - protected GeoPointTermsEnum(BytesRefIterator termsEnum, GeoPointField.TermEncoding termEncoding) { - super(termsEnum); - this.termEncoding = termEncoding; - } - - public Long next() throws IOException { - final BytesRef term = termsEnum.next(); - if (term == null) { - return null; - } - if (termEncoding == GeoPointField.TermEncoding.PREFIX) { - return GeoPointField.prefixCodedToGeoCoded(term); - } else if (termEncoding == GeoPointField.TermEncoding.NUMERIC) { - return 
LegacyNumericUtils.prefixCodedToLong(term); - } - throw new IllegalArgumentException("GeoPoint.TermEncoding should be one of: " + GeoPointField.TermEncoding.PREFIX - + " or " + GeoPointField.TermEncoding.NUMERIC + " found: " + termEncoding); - } - } - - protected static class GeoPointTermsEnumLegacy extends BaseGeoPointTermsEnum { - private final GeoPoint next; - private final CharsRefBuilder spare; - - protected GeoPointTermsEnumLegacy(BytesRefIterator termsEnum) { - super(termsEnum); - next = new GeoPoint(); - spare = new CharsRefBuilder(); - } - - public GeoPoint next() throws IOException { - final BytesRef term = termsEnum.next(); - if (term == null) { - return null; - } - spare.copyUTF8Bytes(term); - int commaIndex = -1; - for (int i = 0; i < spare.length(); i++) { - if (spare.charAt(i) == ',') { // saves a string creation - commaIndex = i; - break; - } - } - if (commaIndex == -1) { - assert false; - return next.reset(0, 0); - } - final double lat = Double.parseDouble(new String(spare.chars(), 0, commaIndex)); - final double lon = Double.parseDouble(new String(spare.chars(), commaIndex + 1, spare.length() - (commaIndex + 1))); - return next.reset(lat, lon); - } - } - - AbstractIndexGeoPointFieldData(IndexSettings indexSettings, String fieldName, IndexFieldDataCache cache) { - super(indexSettings, fieldName, cache); - } - - @Override - public SortField sortField(@Nullable Object missingValue, MultiValueMode sortMode, Nested nested, boolean reverse) { - throw new IllegalArgumentException("can't sort on geo_point field without using specific sorting feature, like geo_distance"); - } - - @Override - protected AtomicGeoPointFieldData empty(int maxDoc) { - return AbstractAtomicGeoPointFieldData.empty(maxDoc); - } -} diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java index 3b4ac58e0e8..6c92d571196 100644 
--- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AbstractLatLonPointDVIndexFieldData.java @@ -19,13 +19,11 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.document.LatLonDocValuesField; -import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.SortField; -import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -38,8 +36,6 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.search.MultiValueMode; -import java.io.IOException; - public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndexFieldData implements IndexGeoPointFieldData { AbstractLatLonPointDVIndexFieldData(Index index, String fieldName) { @@ -58,16 +54,12 @@ public abstract class AbstractLatLonPointDVIndexFieldData extends DocValuesIndex @Override public AtomicGeoPointFieldData load(LeafReaderContext context) { - try { - LeafReader reader = context.reader(); - FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName); - if (info != null) { - checkCompatible(info); - } - return new LatLonPointDVAtomicFieldData(DocValues.getSortedNumeric(reader, fieldName)); - } catch (IOException e) { - throw new IllegalStateException("Cannot load doc values", e); + LeafReader reader = context.reader(); + FieldInfo info = reader.getFieldInfos().fieldInfo(fieldName); + if (info != null) { + checkCompatible(info); } + return new LatLonPointDVAtomicFieldData(reader, fieldName); } @Override diff --git 
a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java index 821fcc1bf29..82ae0bb5bf1 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java @@ -67,7 +67,7 @@ abstract class AtomicDoubleFieldData implements AtomicNumericFieldData { @Override public SortedNumericDoubleValues getDoubleValues() { - return FieldData.emptySortedNumericDoubles(maxDoc); + return FieldData.emptySortedNumericDoubles(); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVAtomicFieldData.java index c82ac69f282..014220304d6 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVAtomicFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BinaryDVAtomicFieldData.java @@ -23,7 +23,6 @@ import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReader; import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Bits; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; @@ -49,8 +48,7 @@ public class BinaryDVAtomicFieldData implements AtomicFieldData { public SortedBinaryDocValues getBytesValues() { try { final BinaryDocValues values = DocValues.getBinary(reader, field); - final Bits docsWithField = DocValues.getDocsWithField(reader, field); - return FieldData.singleton(values, docsWithField); + return FieldData.singleton(values); } catch (IOException e) { throw new IllegalStateException("Cannot load doc values", e); } diff --git 
a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java index 8d43241ba75..aa2775046ff 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/BytesBinaryDVAtomicFieldData.java @@ -22,15 +22,12 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.index.fielddata.AtomicFieldData; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import java.util.Arrays; +import java.io.IOException; import java.util.Collection; import java.util.Collections; @@ -58,42 +55,34 @@ final class BytesBinaryDVAtomicFieldData implements AtomicFieldData { return new SortedBinaryDocValues() { int count; - BytesRefBuilder[] refs = new BytesRefBuilder[0]; final ByteArrayDataInput in = new ByteArrayDataInput(); + final BytesRef scratch = new BytesRef(); @Override - public void setDocument(int docId) { - final BytesRef bytes = values.get(docId); - in.reset(bytes.bytes, bytes.offset, bytes.length); - if (bytes.length == 0) { - count = 0; - } else { + public boolean advanceExact(int doc) throws IOException { + if (values.advanceExact(doc)) { + final BytesRef bytes = values.binaryValue(); + assert bytes.length > 0; + in.reset(bytes.bytes, bytes.offset, bytes.length); count = in.readVInt(); - if (count > refs.length) { - final int previousLength = refs.length; - refs = Arrays.copyOf(refs, ArrayUtil.oversize(count, 
RamUsageEstimator.NUM_BYTES_OBJECT_REF)); - for (int i = previousLength; i < refs.length; ++i) { - refs[i] = new BytesRefBuilder(); - } - } - for (int i = 0; i < count; ++i) { - final int length = in.readVInt(); - final BytesRefBuilder scratch = refs[i]; - scratch.grow(length); - in.readBytes(scratch.bytes(), 0, length); - scratch.setLength(length); - } + scratch.bytes = bytes.bytes; + return true; + } else { + return false; } } @Override - public int count() { + public int docValueCount() { return count; } @Override - public BytesRef valueAt(int index) { - return refs[index].get(); + public BytesRef nextValue() throws IOException { + scratch.length = in.readVInt(); + scratch.offset = in.getPosition(); + in.setPosition(scratch.offset + scratch.length); + return scratch; } }; diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java index c77ceb57457..4a066328bc2 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/DocValuesIndexFieldData.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.logging.log4j.Logger; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; @@ -75,14 +75,14 @@ public abstract class DocValuesIndexFieldData { private static final Set BINARY_INDEX_FIELD_NAMES = unmodifiableSet(newHashSet(UidFieldMapper.NAME, IdFieldMapper.NAME)); private NumericType numericType; - private Function> scriptFunction = AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION; + private Function> scriptFunction = AbstractAtomicOrdinalsFieldData.DEFAULT_SCRIPT_FUNCTION; 
public Builder numericType(NumericType type) { this.numericType = type; return this; } - public Builder scriptFunction(Function> scriptFunction) { + public Builder scriptFunction(Function> scriptFunction) { this.scriptFunction = scriptFunction; return this; } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDVAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDVAtomicFieldData.java deleted file mode 100644 index a71ea3a552e..00000000000 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/GeoPointDVAtomicFieldData.java +++ /dev/null @@ -1,90 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.fielddata.plain; - -import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.ArrayUtil; -import org.apache.lucene.util.RamUsageEstimator; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.MultiGeoPointValues; - -import java.util.Arrays; -import java.util.Collection; -import java.util.Collections; - -final class GeoPointDVAtomicFieldData extends AbstractAtomicGeoPointFieldData { - - private final SortedNumericDocValues values; - - GeoPointDVAtomicFieldData(SortedNumericDocValues values) { - super(); - this.values = values; - } - - @Override - public long ramBytesUsed() { - return 0; // not exposed by Lucene - } - - @Override - public Collection getChildResources() { - return Collections.emptyList(); - } - - @Override - public void close() { - // no-op - } - - @Override - public MultiGeoPointValues getGeoPointValues() { - return new MultiGeoPointValues() { - GeoPoint[] points = new GeoPoint[0]; - private int count = 0; - - @Override - public void setDocument(int docId) { - values.setDocument(docId); - count = values.count(); - if (count > points.length) { - final int previousLength = points.length; - points = Arrays.copyOf(points, ArrayUtil.oversize(count, RamUsageEstimator.NUM_BYTES_OBJECT_REF)); - for (int i = previousLength; i < points.length; ++i) { - points[i] = new GeoPoint(Double.NaN, Double.NaN); - } - } - for (int i=0; i points.length) { - final int previousLength = points.length; - points = Arrays.copyOf(points, ArrayUtil.oversize(count, RamUsageEstimator.NUM_BYTES_OBJECT_REF)); - for (int i = previousLength; i < points.length; ++i) { - points[i] = new GeoPoint(Double.NaN, Double.NaN); - } + final GeoPoint point = new GeoPoint(); + + @Override + public boolean advanceExact(int doc) throws IOException { + return numericValues.advanceExact(doc); } - long encoded; - for (int i=0; i>> 32)), 
GeoEncodingUtils.decodeLongitude((int)encoded)); + + @Override + public int docValueCount() { + return numericValues.docValueCount(); } - } - @Override - public int count() { - return count; - } - - @Override - public GeoPoint valueAt(int index) { - return points[index]; - } - }; + @Override + public GeoPoint nextValue() throws IOException { + final long encoded = numericValues.nextValue(); + point.reset(GeoEncodingUtils.decodeLatitude((int) (encoded >>> 32)), + GeoEncodingUtils.decodeLongitude((int) encoded)); + return point; + } + }; + } catch (IOException e) { + throw new IllegalStateException("Cannot load doc values", e); + } } } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesAtomicFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesAtomicFieldData.java index 02e7da88869..c9e81ca4f2d 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesAtomicFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/PagedBytesAtomicFieldData.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.index.fielddata.plain; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.apache.lucene.util.BytesRef; @@ -68,7 +68,7 @@ public class PagedBytesAtomicFieldData extends AbstractAtomicOrdinalsFieldData { } @Override - public RandomAccessOrds getOrdinalsValues() { + public SortedSetDocValues getOrdinalsValues() { return ordinals.ordinals(new ValuesHolder(bytes, termOrdToBytesOffset)); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java index ee451ff0233..74b180f2c1b 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java +++ 
b/core/src/main/java/org/elasticsearch/index/fielddata/plain/ParentChildIndexFieldData.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.fielddata.AbstractSortedDocValues; import org.elasticsearch.index.fielddata.AtomicParentChildFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; @@ -254,10 +255,10 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData= 0) { - return (int) globalOrds.get(segmentOrd); - } else { - return segmentOrd; - } + public int ordValue() throws IOException { + return (int) globalOrds.get(segmentValues.ordValue()); + } + + @Override + public boolean advanceExact(int target) throws IOException { + return segmentValues.advanceExact(target); + } + + @Override + public int docID() { + return segmentValues.docID(); } }; } @@ -313,7 +318,7 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData ordinalMapPerType; GlobalFieldData(IndexReader reader, AtomicParentChildFieldData[] fielddata, long ramBytesUsed, Map ordinalMapPerType) { - this.coreCacheKey = reader.getCoreCacheKey(); + this.coreCacheKey = reader.getReaderCacheHelper().getKey(); this.leaves = reader.leaves(); this.ramBytesUsed = ramBytesUsed; this.fielddata = fielddata; @@ -327,7 +332,8 @@ public class ParentChildIndexFieldData extends AbstractIndexFieldData> scriptFunction) { super(scriptFunction); this.reader = reader; @@ -48,9 +47,9 @@ public final class SortedSetDVBytesAtomicFieldData extends AbstractAtomicOrdinal } @Override - public RandomAccessOrds getOrdinalsValues() { + public SortedSetDocValues getOrdinalsValues() { try { - return FieldData.maybeSlowRandomAccessOrds(DocValues.getSortedSet(reader, field)); + return DocValues.getSortedSet(reader, field); } catch 
(IOException e) { throw new IllegalStateException("cannot load docvalues", e); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java index 2c59e8559c2..ea076d476de 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/plain/SortedSetDVOrdinalsIndexFieldData.java @@ -21,10 +21,10 @@ package org.elasticsearch.index.fielddata.plain; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.SortedSetSortField; import org.apache.lucene.search.SortedSetSelector; +import org.apache.lucene.search.SortedSetSortField; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.IndexSettings; @@ -46,10 +46,10 @@ public class SortedSetDVOrdinalsIndexFieldData extends DocValuesIndexFieldData i private final IndexSettings indexSettings; private final IndexFieldDataCache cache; private final CircuitBreakerService breakerService; - private final Function> scriptFunction; + private final Function> scriptFunction; public SortedSetDVOrdinalsIndexFieldData(IndexSettings indexSettings, IndexFieldDataCache cache, String fieldName, - CircuitBreakerService breakerService, Function> scriptFunction) { + CircuitBreakerService breakerService, Function> scriptFunction) { super(indexSettings.getIndex(), fieldName); this.indexSettings = indexSettings; this.cache = cache; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java b/core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java index 
8e6cee222d4..60fbfc0698c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/CustomDocValuesField.java @@ -53,11 +53,6 @@ abstract class CustomDocValuesField implements IndexableField { return TYPE; } - @Override - public float boost() { - return 1f; - } - @Override public String stringValue() { return null; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java index 8b7355dca4b..39280bcee20 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentMapper.java @@ -277,7 +277,7 @@ public class DocumentMapper implements ToXContent { } // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and // therefor is guaranteed to be a live doc. - final Weight nestedWeight = filter.createWeight(sc.searcher(), false); + final Weight nestedWeight = filter.createWeight(sc.searcher(), false, 1f); Scorer scorer = nestedWeight.scorer(context); if (scorer == null) { continue; diff --git a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java index 0ed093b3a88..b8c958f41f3 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/FieldMapper.java @@ -21,7 +21,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; -import org.apache.lucene.document.Field; import org.apache.lucene.document.FieldType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -286,12 +285,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable { try { parseCreateField(context, fields); 
for (IndexableField field : fields) { - if (!customBoost() - // don't set boosts eg. on dv fields - && field.fieldType().indexOptions() != IndexOptions.NONE - && indexCreatedVersion.before(Version.V_5_0_0_alpha1)) { - ((Field)(field)).setBoost(fieldType().boost()); - } context.doc().add(field); } } catch (Exception e) { @@ -306,13 +299,6 @@ public abstract class FieldMapper extends Mapper implements Cloneable { */ protected abstract void parseCreateField(ParseContext context, List fields) throws IOException; - /** - * Derived classes can override it to specify that boost value is set by derived classes. - */ - protected boolean customBoost() { - return false; - } - @Override public Iterator iterator() { return multiFields.iterator(); diff --git a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java index c46fe227d6f..2f09027c4ff 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/GeoShapeFieldMapper.java @@ -31,7 +31,6 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree; import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree; import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree; -import org.elasticsearch.Version; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoUtils; @@ -463,10 +462,6 @@ public class GeoShapeFieldMapper extends FieldMapper { return null; } for (Field field : fields) { - if (!customBoost() && - fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0_alpha1)) { - field.setBoost(fieldType().boost()); - } context.doc().add(field); } } catch (Exception e) { diff --git a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java 
b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index a15432c635d..3d6f4efba14 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -27,9 +27,10 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.PointValues; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.fieldstats.FieldStats; import org.elasticsearch.common.Explicit; @@ -233,19 +234,27 @@ public class IpFieldMapper extends FieldMapper { public static final class IpScriptDocValues extends ScriptDocValues { - private final RandomAccessOrds values; + private final SortedSetDocValues in; + private long[] ords = new long[0]; + private int count; - public IpScriptDocValues(RandomAccessOrds values) { - this.values = values; + public IpScriptDocValues(SortedSetDocValues in) { + this.in = in; } @Override - public void setNextDocId(int docId) { - values.setDocument(docId); + public void setNextDocId(int docId) throws IOException { + count = 0; + if (in.advanceExact(docId)) { + for (long ord = in.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = in.nextOrd()) { + ords = ArrayUtil.grow(ords, count + 1); + ords[count++] = ord; + } + } } public String getValue() { - if (isEmpty()) { + if (count == 0) { return null; } else { return get(0); @@ -254,15 +263,19 @@ public class IpFieldMapper extends FieldMapper { @Override public String get(int index) { - BytesRef encoded = values.lookupOrd(values.ordAt(0)); - InetAddress address = InetAddressPoint.decode( - Arrays.copyOfRange(encoded.bytes, encoded.offset, encoded.offset + 
encoded.length)); - return InetAddresses.toAddrString(address); + try { + BytesRef encoded = in.lookupOrd(ords[index]); + InetAddress address = InetAddressPoint.decode( + Arrays.copyOfRange(encoded.bytes, encoded.offset, encoded.offset + encoded.length)); + return InetAddresses.toAddrString(address); + } catch (IOException e) { + throw new RuntimeException(e); + } } @Override public int size() { - return values.cardinality(); + return count; } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java index 226ab905a27..c69e68b24b6 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/ScaledFloatFieldMapper.java @@ -219,7 +219,7 @@ public class ScaledFloatFieldMapper extends FieldMapper { } @Override - public Query termsQuery(List values, QueryShardContext context) { + public Query termsQuery(List values, QueryShardContext context) { failIfNotIndexed(); List scaledValues = new ArrayList<>(values.size()); for (Object value : values) { @@ -561,26 +561,30 @@ public class ScaledFloatFieldMapper extends FieldMapper { if (singleValues != null) { return FieldData.singleton(new NumericDoubleValues() { @Override - public double get(int docID) { - return singleValues.get(docID) * scalingFactorInverse; + public boolean advanceExact(int doc) throws IOException { + return singleValues.advanceExact(doc); } - }, DocValues.unwrapSingletonBits(values)); + @Override + public double doubleValue() throws IOException { + return singleValues.longValue() * scalingFactorInverse; + } + }); } else { return new SortedNumericDoubleValues() { @Override - public double valueAt(int index) { - return values.valueAt(index) * scalingFactorInverse; + public boolean advanceExact(int target) throws IOException { + return values.advanceExact(target); } @Override - public void setDocument(int doc) { 
- values.setDocument(doc); + public double nextValue() throws IOException { + return values.nextValue() * scalingFactorInverse; } @Override - public int count() { - return values.count(); + public int docValueCount() { + return values.docValueCount(); } }; } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java index 5eb8b81009c..dcab99ce6a1 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryBuilder.java @@ -24,6 +24,7 @@ import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -48,14 +49,14 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "bool"; public static final boolean ADJUST_PURE_NEGATIVE_DEFAULT = true; - public static final boolean DISABLE_COORD_DEFAULT = false; private static final String MUSTNOT = "mustNot"; private static final String MUST_NOT = "must_not"; private static final String FILTER = "filter"; private static final String SHOULD = "should"; private static final String MUST = "must"; - private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord"); + private static final ParseField DISABLE_COORD_FIELD = new ParseField("disable_coord") + .withAllDeprecated("disable_coord has been removed"); private static final ParseField MINIMUM_SHOULD_MATCH = new ParseField("minimum_should_match"); private static final ParseField ADJUST_PURE_NEGATIVE = new ParseField("adjust_pure_negative"); @@ -67,8 +68,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { private 
final List shouldClauses = new ArrayList<>(); - private boolean disableCoord = DISABLE_COORD_DEFAULT; - private boolean adjustPureNegative = ADJUST_PURE_NEGATIVE_DEFAULT; private String minimumShouldMatch; @@ -89,7 +88,9 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { shouldClauses.addAll(readQueries(in)); filterClauses.addAll(readQueries(in)); adjustPureNegative = in.readBoolean(); - disableCoord = in.readBoolean(); + if (in.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + in.readBoolean(); // disable_coord + } minimumShouldMatch = in.readOptionalString(); } @@ -100,7 +101,9 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { writeQueries(out, shouldClauses); writeQueries(out, filterClauses); out.writeBoolean(adjustPureNegative); - out.writeBoolean(disableCoord); + if (out.getVersion().before(Version.V_6_0_0_alpha1_UNRELEASED)) { + out.writeBoolean(true); // disable_coord + } out.writeOptionalString(minimumShouldMatch); } @@ -186,21 +189,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { return this.shouldClauses; } - /** - * Disables Similarity#coord(int,int) in scoring. Defaults to false. - */ - public BoolQueryBuilder disableCoord(boolean disableCoord) { - this.disableCoord = disableCoord; - return this; - } - - /** - * @return whether the Similarity#coord(int,int) in scoring are disabled. Defaults to false. 
- */ - public boolean disableCoord() { - return this.disableCoord; - } - /** * @return the string representation of the minimumShouldMatch settings for this query */ @@ -267,7 +255,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { doXArrayContent(FILTER, filterClauses, builder, params); doXArrayContent(MUST_NOT, mustNotClauses, builder, params); doXArrayContent(SHOULD, shouldClauses, builder, params); - builder.field(DISABLE_COORD_FIELD.getPreferredName(), disableCoord); builder.field(ADJUST_PURE_NEGATIVE.getPreferredName(), adjustPureNegative); if (minimumShouldMatch != null) { builder.field(MINIMUM_SHOULD_MATCH.getPreferredName(), minimumShouldMatch); @@ -291,7 +278,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { public static BoolQueryBuilder fromXContent(QueryParseContext parseContext) throws IOException, ParsingException { XContentParser parser = parseContext.parser(); - boolean disableCoord = BoolQueryBuilder.DISABLE_COORD_DEFAULT; boolean adjustPureNegative = BoolQueryBuilder.ADJUST_PURE_NEGATIVE_DEFAULT; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String minimumShouldMatch = null; @@ -349,7 +335,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { } } else if (token.isValue()) { if (DISABLE_COORD_FIELD.match(currentFieldName)) { - disableCoord = parser.booleanValue(); + // ignore } else if (MINIMUM_SHOULD_MATCH.match(currentFieldName)) { minimumShouldMatch = parser.textOrNull(); } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName)) { @@ -377,7 +363,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { boolQuery.filter(queryBuilder); } boolQuery.boost(boost); - boolQuery.disableCoord(disableCoord); boolQuery.adjustPureNegative(adjustPureNegative); boolQuery.minimumShouldMatch(minimumShouldMatch); boolQuery.queryName(queryName); @@ -392,7 +377,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { @Override protected Query doToQuery(QueryShardContext context) throws 
IOException { BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); - booleanQueryBuilder.setDisableCoord(disableCoord); addBooleanClauses(context, booleanQueryBuilder, mustClauses, BooleanClause.Occur.MUST); addBooleanClauses(context, booleanQueryBuilder, mustNotClauses, BooleanClause.Occur.MUST_NOT); addBooleanClauses(context, booleanQueryBuilder, shouldClauses, BooleanClause.Occur.SHOULD); @@ -432,14 +416,13 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { @Override protected int doHashCode() { - return Objects.hash(adjustPureNegative, disableCoord, + return Objects.hash(adjustPureNegative, minimumShouldMatch, mustClauses, shouldClauses, mustNotClauses, filterClauses); } @Override protected boolean doEquals(BoolQueryBuilder other) { return Objects.equals(adjustPureNegative, other.adjustPureNegative) && - Objects.equals(disableCoord, other.disableCoord) && Objects.equals(minimumShouldMatch, other.minimumShouldMatch) && Objects.equals(mustClauses, other.mustClauses) && Objects.equals(shouldClauses, other.shouldClauses) && @@ -462,7 +445,6 @@ public class BoolQueryBuilder extends AbstractQueryBuilder { if (changed) { newBuilder.adjustPureNegative = adjustPureNegative; - newBuilder.disableCoord = disableCoord; newBuilder.minimumShouldMatch = minimumShouldMatch; newBuilder.boost(boost()); newBuilder.queryName(queryName()); diff --git a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java index a91d8fe44cf..e5fff943cd3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/CommonTermsQueryBuilder.java @@ -25,10 +25,9 @@ import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.apache.lucene.index.Term; import org.apache.lucene.queries.ExtendedCommonTermsQuery; import org.apache.lucene.search.BooleanClause.Occur; -import 
org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.util.BytesRefBuilder; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -48,13 +47,7 @@ import java.util.Objects; * added} terms where low-frequency terms are added to a required boolean clause * and high-frequency terms are added to an optional boolean clause. The * optional clause is only executed if the required "low-frequency' clause - * matches. Scores produced by this query will be slightly different to plain - * {@link BooleanQuery} scorer mainly due to differences in the - * {@link Similarity#coord(int,int) number of leave queries} in the required - * boolean clause. In the most cases high-frequency terms are unlikely to - * significantly contribute to the document score unless at least one of the - * low-frequency terms are matched such that this query can improve query - * execution times significantly if applicable. + * matches. 
*/ public class CommonTermsQueryBuilder extends AbstractQueryBuilder { @@ -69,7 +62,8 @@ public class CommonTermsQueryBuilder extends AbstractQueryBuilder } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new RandomAccessWeight(this) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + return new ConstantScoreWeight(this, boost) { + @Override - protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException { + public Scorer scorer(LeafReaderContext context) throws IOException { + DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); final LeafSearchScript leafScript = searchScript.getLeafSearchScript(context); - return new Bits() { + TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { @Override - public boolean get(int doc) { - leafScript.setDocument(doc); + public boolean matches() throws IOException { + leafScript.setDocument(approximation.docID()); Object val = leafScript.run(); if (val == null) { return false; @@ -196,11 +201,12 @@ public class ScriptQueryBuilder extends AbstractQueryBuilder } @Override - public int length() { - return context.reader().maxDoc(); + public float matchCost() { + // TODO: how can we compute this? 
+ return 1000f; } - }; + return new ConstantScoreScorer(this, score(), twoPhase); } }; } diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java index a147e496045..9899ba9a748 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java @@ -80,7 +80,6 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp @Override public Query newDefaultQuery(String text) { BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { try { Query q = createBooleanQuery(entry.getKey(), text, super.getDefaultOperator()); @@ -101,7 +100,6 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp @Override public Query newFuzzyQuery(String text, int fuzziness) { BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { final String fieldName = entry.getKey(); try { @@ -118,7 +116,6 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp @Override public Query newPhraseQuery(String text, int slop) { BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { try { String field = entry.getKey(); @@ -148,7 +145,6 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp @Override public Query newPrefixQuery(String text) { BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { final String fieldName = entry.getKey(); try { @@ -272,7 +268,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp innerBuilder.add(new BooleanClause(new PrefixQuery(new Term(field, token)), 
BooleanClause.Occur.SHOULD)); } - posQuery = innerBuilder.setDisableCoord(true).build(); + posQuery = innerBuilder.build(); } builder.add(new BooleanClause(posQuery, getDefaultOperator())); } diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java index 77b67b89fce..e296c102188 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/DecayFunctionBuilder.java @@ -348,18 +348,18 @@ public abstract class DecayFunctionBuilder final MultiGeoPointValues geoPointValues = fieldData.load(context).getGeoPointValues(); return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { @Override - public int count() { - return geoPointValues.count(); + public int docValueCount() { + return geoPointValues.docValueCount(); } @Override - public void setDocument(int docId) { - geoPointValues.setDocument(docId); + public boolean advanceExact(int docId) throws IOException { + return geoPointValues.advanceExact(docId); } @Override - public double valueAt(int index) { - GeoPoint other = geoPointValues.valueAt(index); + public double nextValue() throws IOException { + GeoPoint other = geoPointValues.nextValue(); return Math.max(0.0d, distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(), DistanceUnit.METERS) - offset); } @@ -367,15 +367,14 @@ public abstract class DecayFunctionBuilder } @Override - protected String getDistanceString(LeafReaderContext ctx, int docId) { + protected String getDistanceString(LeafReaderContext ctx, int docId) throws IOException { StringBuilder values = new StringBuilder(mode.name()); values.append(" of: ["); final MultiGeoPointValues geoPointValues = fieldData.load(ctx).getGeoPointValues(); - geoPointValues.setDocument(docId); - final int num = geoPointValues.count(); - if (num > 0) { 
+ if (geoPointValues.advanceExact(docId)) { + final int num = geoPointValues.docValueCount(); for (int i = 0; i < num; i++) { - GeoPoint value = geoPointValues.valueAt(i); + GeoPoint value = geoPointValues.nextValue(); values.append("Math.max(arcDistance("); values.append(value).append("(=doc value),"); values.append(origin).append("(=origin)) - ").append(offset).append("(=offset), 0)"); @@ -430,33 +429,32 @@ public abstract class DecayFunctionBuilder final SortedNumericDoubleValues doubleValues = fieldData.load(context).getDoubleValues(); return mode.select(new MultiValueMode.UnsortedNumericDoubleValues() { @Override - public int count() { - return doubleValues.count(); + public int docValueCount() { + return doubleValues.docValueCount(); } @Override - public void setDocument(int docId) { - doubleValues.setDocument(docId); + public boolean advanceExact(int doc) throws IOException { + return doubleValues.advanceExact(doc); } @Override - public double valueAt(int index) { - return Math.max(0.0d, Math.abs(doubleValues.valueAt(index) - origin) - offset); + public double nextValue() throws IOException { + return Math.max(0.0d, Math.abs(doubleValues.nextValue() - origin) - offset); } }, 0.0); } @Override - protected String getDistanceString(LeafReaderContext ctx, int docId) { + protected String getDistanceString(LeafReaderContext ctx, int docId) throws IOException { StringBuilder values = new StringBuilder(mode.name()); values.append("["); final SortedNumericDoubleValues doubleValues = fieldData.load(ctx).getDoubleValues(); - doubleValues.setDocument(docId); - final int num = doubleValues.count(); - if (num > 0) { + if (doubleValues.advanceExact(docId)) { + final int num = doubleValues.docValueCount(); for (int i = 0; i < num; i++) { - double value = doubleValues.valueAt(i); + double value = doubleValues.nextValue(); values.append("Math.max(Math.abs("); values.append(value).append("(=doc value) - "); values.append(origin).append("(=origin))) - "); @@ -531,21 +529,28 @@ 
public abstract class DecayFunctionBuilder return new LeafScoreFunction() { @Override - public double score(int docId, float subQueryScore) { - return func.evaluate(distance.get(docId), scale); + public double score(int docId, float subQueryScore) throws IOException { + if (distance.advanceExact(docId)) { + return func.evaluate(distance.doubleValue(), scale); + } else { + return 0; + } } @Override public Explanation explainScore(int docId, Explanation subQueryScore) throws IOException { + if (distance.advanceExact(docId) == false) { + return Explanation.noMatch("No value for the distance"); + } return Explanation.match( CombineFunction.toFloat(score(docId, subQueryScore.getValue())), "Function for field " + getFieldName() + ":", - func.explainFunction(getDistanceString(ctx, docId), distance.get(docId), scale)); + func.explainFunction(getDistanceString(ctx, docId), distance.doubleValue(), scale)); } }; } - protected abstract String getDistanceString(LeafReaderContext ctx, int docId); + protected abstract String getDistanceString(LeafReaderContext ctx, int docId) throws IOException; protected abstract String getFieldName(); diff --git a/core/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java b/core/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java index 1ee427599ca..3762b1fffc0 100644 --- a/core/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/ESToParentBlockJoinQuery.java @@ -75,8 +75,8 @@ public final class ESToParentBlockJoinQuery extends Query { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return query.createWeight(searcher, needsScores); + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + return query.createWeight(searcher, needsScores, boost); } @Override diff --git 
a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java index f3ea9447db7..c4630f338cb 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MatchQuery.java @@ -434,7 +434,7 @@ public class MatchQuery { private Query boolToExtendedCommonTermsQuery(BooleanQuery bq, Occur highFreqOccur, Occur lowFreqOccur, float maxTermFrequency, MappedFieldType fieldType) { ExtendedCommonTermsQuery query = new ExtendedCommonTermsQuery(highFreqOccur, lowFreqOccur, maxTermFrequency, - bq.isCoordDisabled(), fieldType); + fieldType); for (BooleanClause clause : bq.clauses()) { if (!(clause.getQuery() instanceof TermQuery)) { return bq; diff --git a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 15e8561c8fd..7c1f91d3587 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -305,9 +305,9 @@ public class MultiMatchQuery extends MatchQuery { terms = Arrays.copyOf(terms, i); blendedBoost = Arrays.copyOf(blendedBoost, i); if (commonTermsCutoff != null) { - queries.add(BlendedTermQuery.commonTermsBlendedQuery(terms, blendedBoost, false, commonTermsCutoff)); + queries.add(BlendedTermQuery.commonTermsBlendedQuery(terms, blendedBoost, commonTermsCutoff)); } else if (tieBreaker == 1.0f) { - queries.add(BlendedTermQuery.booleanBlendedQuery(terms, blendedBoost, false)); + queries.add(BlendedTermQuery.booleanBlendedQuery(terms, blendedBoost)); } else { queries.add(BlendedTermQuery.dismaxBlendedQuery(terms, blendedBoost, tieBreaker)); } @@ -319,7 +319,6 @@ public class MultiMatchQuery extends MatchQuery { // however their score contribution will be different // TODO: can we improve this? 
BooleanQuery.Builder bq = new BooleanQuery.Builder(); - bq.setDisableCoord(true); for (Query query : queries) { bq.add(query, Occur.SHOULD); } diff --git a/core/src/main/java/org/elasticsearch/index/shard/FilterDocValuesProducer.java b/core/src/main/java/org/elasticsearch/index/shard/FilterDocValuesProducer.java deleted file mode 100644 index 149915458ee..00000000000 --- a/core/src/main/java/org/elasticsearch/index/shard/FilterDocValuesProducer.java +++ /dev/null @@ -1,157 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.shard; - -import org.apache.lucene.codecs.DocValuesProducer; -import org.apache.lucene.index.BinaryDocValues; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.util.Accountable; -import org.apache.lucene.util.Bits; - -import java.io.IOException; -import java.util.Collection; -import java.util.Collections; - -// TODO: move this to lucene's FilterCodecReader - -/** - * Base class for filtering DocValuesProducer implementations. - *

- * NOTE: just like with DocValuesProducer, the default {@link #getMergeInstance()} - * is unoptimized. overriding this method when possible can improve performance. - */ -class FilterDocValuesProducer extends DocValuesProducer { - /** The underlying Producer instance. */ - protected final DocValuesProducer in; - - /** - * Creates a new FilterDocValuesProducer - * @param in the underlying producer. - */ - FilterDocValuesProducer(DocValuesProducer in) { - this.in = in; - } - - @Override - public void close() throws IOException { - in.close(); - } - - @Override - public long ramBytesUsed() { - return in.ramBytesUsed(); - } - - @Override - public Collection getChildResources() { - return in.getChildResources(); - } - - @Override - public NumericDocValues getNumeric(FieldInfo field) throws IOException { - return in.getNumeric(field); - } - - @Override - public BinaryDocValues getBinary(FieldInfo field) throws IOException { - return in.getBinary(field); - } - - @Override - public SortedDocValues getSorted(FieldInfo field) throws IOException { - return in.getSorted(field); - } - - @Override - public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { - return in.getSortedNumeric(field); - } - - @Override - public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { - return in.getSortedSet(field); - } - - @Override - public Bits getDocsWithField(FieldInfo field) throws IOException { - return in.getDocsWithField(field); - } - - @Override - public void checkIntegrity() throws IOException { - in.checkIntegrity(); - } - - // TODO: move this out somewhere else (and can fix all these null producers in lucene?) - // we shouldn't need nullness for any reason. 
- - public static final DocValuesProducer EMPTY = new DocValuesProducer() { - - @Override - public void close() throws IOException {} - - @Override - public long ramBytesUsed() { - return 0; - } - - @Override - public Collection getChildResources() { - return Collections.emptyList(); - } - - @Override - public NumericDocValues getNumeric(FieldInfo field) throws IOException { - throw new IllegalStateException(); // we don't have any docvalues - } - - @Override - public BinaryDocValues getBinary(FieldInfo field) throws IOException { - throw new IllegalStateException(); // we don't have any docvalues - } - - @Override - public SortedDocValues getSorted(FieldInfo field) throws IOException { - throw new IllegalStateException(); // we don't have any docvalues - } - - @Override - public SortedNumericDocValues getSortedNumeric(FieldInfo field) throws IOException { - throw new IllegalStateException(); // we don't have any docvalues - } - - @Override - public SortedSetDocValues getSortedSet(FieldInfo field) throws IOException { - throw new IllegalStateException(); // we don't have any docvalues - } - - @Override - public Bits getDocsWithField(FieldInfo field) throws IOException { - throw new IllegalStateException(); // we don't have any docvalues - } - - @Override - public void checkIntegrity() throws IOException {} - }; -} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java b/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java index 5603001a293..a2e738128e3 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexSearcherWrapper.java @@ -40,8 +40,9 @@ public class IndexSearcherWrapper { * Wraps the given {@link DirectoryReader}. The wrapped reader can filter out document just like delete documents etc. but * must not change any term or document content. *

- * NOTE: The wrapper has a per-request lifecycle, must delegate {@link IndexReader#getCoreCacheKey()} and must be an instance - * of {@link FilterDirectoryReader} that eventually exposes the original reader via {@link FilterDirectoryReader#getDelegate()}. + * NOTE: The wrapper has a per-request lifecycle, must delegate {@link IndexReader#getReaderCacheHelper()}, + * {@link LeafReader#getCoreCacheHelper()} and must be an instance of {@link FilterDirectoryReader} that + * eventually exposes the original reader via {@link FilterDirectoryReader#getDelegate()}. * The returned reader is closed once it goes out of scope. *

* @param reader The provided directory reader to be wrapped to add custom functionality @@ -74,7 +75,7 @@ public class IndexSearcherWrapper { NonClosingReaderWrapper nonClosingReaderWrapper = new NonClosingReaderWrapper(engineSearcher.getDirectoryReader()); DirectoryReader reader = wrap(nonClosingReaderWrapper); if (reader != nonClosingReaderWrapper) { - if (reader.getCoreCacheKey() != elasticsearchDirectoryReader.getCoreCacheKey()) { + if (reader.getReaderCacheHelper() != elasticsearchDirectoryReader.getReaderCacheHelper()) { throw new IllegalStateException("wrapped directory reader doesn't delegate IndexReader#getCoreCacheKey, wrappers must override this method and delegate" + " to the original readers core cache key. Wrapped readers can't be used as cache keys since their are used only per request which would lead to subtle bugs"); } @@ -136,9 +137,10 @@ public class IndexSearcherWrapper { } @Override - public Object getCoreCacheKey() { - return in.getCoreCacheKey(); + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); } + } } diff --git a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java index e8203af8523..2f3abf7a61b 100644 --- a/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java +++ b/core/src/main/java/org/elasticsearch/index/similarity/SimilarityService.java @@ -23,6 +23,8 @@ import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper; import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.Version; import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.AbstractIndexComponent; import org.elasticsearch.index.IndexModule; @@ -36,9 +38,9 @@ import java.util.Map; public final class 
SimilarityService extends AbstractIndexComponent { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(SimilarityService.class)); public static final String DEFAULT_SIMILARITY = "BM25"; private final Similarity defaultSimilarity; - private final Similarity baseSimilarity; private final Map similarities; private static final Map> DEFAULTS; public static final Map> BUILT_IN; @@ -95,14 +97,14 @@ public final class SimilarityService extends AbstractIndexComponent { this.similarities = providers; defaultSimilarity = (providers.get("default") != null) ? providers.get("default").get() : providers.get(SimilarityService.DEFAULT_SIMILARITY).get(); - // Expert users can configure the base type as being different to default, but out-of-box we use default. - baseSimilarity = (providers.get("base") != null) ? providers.get("base").get() : - defaultSimilarity; + if (providers.get("base") != null) { + DEPRECATION_LOGGER.deprecated("The [base] similarity is ignored since query normalization and coords have been removed"); + } } public Similarity similarity(MapperService mapperService) { // TODO we can maybe factor out MapperService here entirely by introducing an interface for the lookup? - return (mapperService != null) ? new PerFieldSimilarity(defaultSimilarity, baseSimilarity, mapperService) : + return (mapperService != null) ? 
new PerFieldSimilarity(defaultSimilarity, mapperService) : defaultSimilarity; } @@ -134,8 +136,8 @@ public final class SimilarityService extends AbstractIndexComponent { private final Similarity defaultSimilarity; private final MapperService mapperService; - PerFieldSimilarity(Similarity defaultSimilarity, Similarity baseSimilarity, MapperService mapperService) { - super(baseSimilarity); + PerFieldSimilarity(Similarity defaultSimilarity, MapperService mapperService) { + super(); this.defaultSimilarity = defaultSimilarity; this.mapperService = mapperService; } diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java index f33818ca32b..3fda3d3f806 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesQueryCache.java @@ -139,16 +139,6 @@ public class IndicesQueryCache extends AbstractComponent implements QueryCache, return in.explain(context, doc); } - @Override - public float getValueForNormalization() throws IOException { - return in.getValueForNormalization(); - } - - @Override - public void normalize(float norm, float topLevelBoost) { - in.normalize(norm, topLevelBoost); - } - @Override public Scorer scorer(LeafReaderContext context) throws IOException { shardKeyMap.add(context.reader()); diff --git a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index 48560f102d2..bd6d75ea3d6 100644 --- a/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/core/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -245,7 +245,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo } } - private class CleanupKey implements IndexReader.ReaderClosedListener { + private class CleanupKey implements IndexReader.ClosedListener { final CacheEntity 
entity; final long readerVersion; // use the reader version to now keep a reference to a "short" lived reader until its reaped @@ -255,7 +255,7 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo } @Override - public void onClose(IndexReader reader) { + public void onClose(IndexReader.CacheKey cacheKey) { Boolean remove = registeredClosedListeners.remove(this); if (remove != null) { keysToClean.add(this); @@ -265,6 +265,9 @@ public final class IndicesRequestCache extends AbstractComponent implements Remo @Override public boolean equals(Object o) { if (this == o) return true; + if (o == null || getClass() != o.getClass()) { + return false; + } CleanupKey that = (CleanupKey) o; if (readerVersion != that.readerVersion) return false; if (!entity.getCacheIdentity().equals(that.entity.getCacheIdentity())) return false; diff --git a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java index 860bdeb8ff6..fcd925c2585 100644 --- a/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java +++ b/core/src/main/java/org/elasticsearch/indices/fielddata/cache/IndicesFieldDataCache.java @@ -23,7 +23,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.SegmentReader; +import org.apache.lucene.index.IndexReader.CacheKey; import org.apache.lucene.util.Accountable; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.cache.Cache; @@ -44,6 +44,7 @@ import org.elasticsearch.index.fielddata.IndexFieldDataCache; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardUtils; +import java.io.IOException; import java.util.ArrayList; import java.util.List; import 
java.util.function.ToLongBiFunction; @@ -107,7 +108,7 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL /** * A specific cache instance for the relevant parameters of it (index, fieldNames, fieldType). */ - static class IndexFieldCache implements IndexFieldDataCache, SegmentReader.CoreClosedListener, IndexReader.ReaderClosedListener { + static class IndexFieldCache implements IndexFieldDataCache, IndexReader.ClosedListener { private final Logger logger; final Index index; final String fieldName; @@ -125,10 +126,14 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL @Override public > FD load(final LeafReaderContext context, final IFD indexFieldData) throws Exception { final ShardId shardId = ShardUtils.extractShardId(context.reader()); - final Key key = new Key(this, context.reader().getCoreCacheKey(), shardId); + final IndexReader.CacheHelper cacheHelper = context.reader().getCoreCacheHelper(); + if (cacheHelper == null) { + throw new IllegalArgumentException("Reader " + context.reader() + " does not support caching"); + } + final Key key = new Key(this, cacheHelper.getKey(), shardId); //noinspection unchecked final Accountable accountable = cache.computeIfAbsent(key, k -> { - context.reader().addCoreClosedListener(IndexFieldCache.this); + cacheHelper.addClosedListener(IndexFieldCache.this); for (Listener listener : this.listeners) { k.listeners.add(listener); } @@ -149,7 +154,11 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL @Override public > IFD load(final DirectoryReader indexReader, final IFD indexFieldData) throws Exception { final ShardId shardId = ShardUtils.extractShardId(indexReader); - final Key key = new Key(this, indexReader.getCoreCacheKey(), shardId); + final IndexReader.CacheHelper cacheHelper = indexReader.getReaderCacheHelper(); + if (cacheHelper == null) { + throw new IllegalArgumentException("Reader " + indexReader + " does not support 
caching"); + } + final Key key = new Key(this, cacheHelper.getKey(), shardId); //noinspection unchecked final Accountable accountable = cache.computeIfAbsent(key, k -> { ElasticsearchDirectoryReader.addReaderCloseListener(indexReader, IndexFieldCache.this); @@ -171,14 +180,8 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL } @Override - public void onClose(Object coreKey) { - cache.invalidate(new Key(this, coreKey, null)); - // don't call cache.cleanUp here as it would have bad performance implications - } - - @Override - public void onClose(IndexReader reader) { - cache.invalidate(new Key(this, reader.getCoreCacheKey(), null)); + public void onClose(CacheKey key) throws IOException { + cache.invalidate(new Key(this, key, null)); // don't call cache.cleanUp here as it would have bad performance implications } @@ -211,12 +214,12 @@ public class IndicesFieldDataCache extends AbstractComponent implements RemovalL public static class Key { public final IndexFieldCache indexCache; - public final Object readerKey; + public final IndexReader.CacheKey readerKey; public final ShardId shardId; public final List listeners = new ArrayList<>(); - Key(IndexFieldCache indexCache, Object readerKey, @Nullable ShardId shardId) { + Key(IndexFieldCache indexCache, IndexReader.CacheKey readerKey, @Nullable ShardId shardId) { this.indexCache = indexCache; this.readerKey = readerKey; this.shardId = shardId; diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java index 90e3417ad1f..2d6fd8a2b60 100644 --- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java +++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java @@ -23,17 +23,19 @@ package org.elasticsearch.search; import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.NumericDocValues; -import 
org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.index.fielddata.AbstractBinaryDocValues; +import org.elasticsearch.index.fielddata.AbstractNumericDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedDocValues; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; @@ -52,86 +54,65 @@ public enum MultiValueMode implements Writeable { */ SUM { @Override - protected long pick(SortedNumericDocValues values, long missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - long total = 0; - for (int index = 0; index < count; ++index) { - total += values.valueAt(index); - } - return total; - } else { - return missingValue; + protected long pick(SortedNumericDocValues values) throws IOException { + final int count = values.docValueCount(); + long total = 0; + for (int index = 0; index < count; ++index) { + total += values.nextValue(); } + return total; } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - long totalValue = 0; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); + protected long pick(SortedNumericDocValues values, long missingValue, 
DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + int totalCount = 0; + long totalValue = 0; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final int count = values.docValueCount(); for (int index = 0; index < count; ++index) { - totalValue += values.valueAt(index); + totalValue += values.nextValue(); } totalCount += count; } - return totalCount > 0 ? totalValue : missingValue; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return totalCount > 0 ? totalValue : missingValue; } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - double total = 0; - for (int index = 0; index < count; ++index) { - total += values.valueAt(index); - } - return total; - } else { - return missingValue; + protected double pick(SortedNumericDoubleValues values) throws IOException { + final int count = values.docValueCount(); + double total = 0; + for (int index = 0; index < count; ++index) { + total += values.nextValue(); } + return total; } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - double totalValue = 0; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + int totalCount = 0; + double totalValue = 0; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final int count = values.docValueCount(); for (int index = 0; index < count; ++index) { - totalValue += values.valueAt(index); + totalValue += values.nextValue(); } totalCount += count; } - 
return totalCount > 0 ? totalValue : missingValue; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return totalCount > 0 ? totalValue : missingValue; } @Override - protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - double total = 0; - for (int index = 0; index < count; ++index) { - total += values.valueAt(index); - } - return total; - } else { - return missingValue; + protected double pick(UnsortedNumericDoubleValues values) throws IOException { + final int count = values.docValueCount(); + double total = 0; + for (int index = 0; index < count; ++index) { + total += values.nextValue(); } + return total; } }, @@ -140,92 +121,71 @@ public enum MultiValueMode implements Writeable { */ AVG { @Override - protected long pick(SortedNumericDocValues values, long missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - long total = 0; - for (int index = 0; index < count; ++index) { - total += values.valueAt(index); - } - return count > 1 ? Math.round((double)total/(double)count) : total; - } else { - return missingValue; + protected long pick(SortedNumericDocValues values) throws IOException { + final int count = values.docValueCount(); + long total = 0; + for (int index = 0; index < count; ++index) { + total += values.nextValue(); } + return count > 1 ? 
Math.round((double)total/(double)count) : total; } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - long totalValue = 0; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + int totalCount = 0; + long totalValue = 0; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final int count = values.docValueCount(); for (int index = 0; index < count; ++index) { - totalValue += values.valueAt(index); + totalValue += values.nextValue(); } totalCount += count; } - if (totalCount < 1) { - return missingValue; - } - return totalCount > 1 ? Math.round((double)totalValue/(double)totalCount) : totalValue; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } - } - - @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - double total = 0; - for (int index = 0; index < count; ++index) { - total += values.valueAt(index); - } - return total/count; - } else { + if (totalCount < 1) { return missingValue; } + return totalCount > 1 ? 
Math.round((double)totalValue/(double)totalCount) : totalValue; } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - double totalValue = 0; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); + protected double pick(SortedNumericDoubleValues values) throws IOException { + final int count = values.docValueCount(); + double total = 0; + for (int index = 0; index < count; ++index) { + total += values.nextValue(); + } + return total/count; + } + + @Override + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + int totalCount = 0; + double totalValue = 0; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final int count = values.docValueCount(); for (int index = 0; index < count; ++index) { - totalValue += values.valueAt(index); + totalValue += values.nextValue(); } totalCount += count; } - if (totalCount < 1) { - return missingValue; - } - return totalValue/totalCount; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + if (totalCount < 1) { + return missingValue; + } + return totalValue/totalCount; } @Override - protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - double total = 0; - for (int index = 0; index < count; ++index) { - total += values.valueAt(index); - } - return total/count; - } else { - return missingValue; + protected double pick(UnsortedNumericDoubleValues values) throws IOException { + final int count = values.docValueCount(); + double total = 0; + for (int index = 0; index < count; ++index) { + total += values.nextValue(); } + return total/count; } }, 
@@ -234,36 +194,28 @@ public enum MultiValueMode implements Writeable { */ MEDIAN { @Override - protected long pick(SortedNumericDocValues values, long missingValue, int doc) { - values.setDocument(doc); - int count = values.count(); - if (count > 0) { - if (count % 2 == 0) { - count /= 2; - return Math.round((values.valueAt(count - 1) + values.valueAt(count))/2.0); - } else { - count /= 2; - return values.valueAt(count); - } + protected long pick(SortedNumericDocValues values) throws IOException { + int count = values.docValueCount(); + for (int i = 0; i < (count - 1) / 2; ++i) { + values.nextValue(); + } + if (count % 2 == 0) { + return Math.round(((double) values.nextValue() + values.nextValue()) / 2); } else { - return missingValue; + return values.nextValue(); } } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - int count = values.count(); - if (count > 0) { - if (count % 2 == 0) { - count /= 2; - return (values.valueAt(count - 1) + values.valueAt(count))/2.0; - } else { - count /= 2; - return values.valueAt(count); - } + protected double pick(SortedNumericDoubleValues values) throws IOException { + int count = values.docValueCount(); + for (int i = 0; i < (count - 1) / 2; ++i) { + values.nextValue(); + } + if (count % 2 == 0) { + return (values.nextValue() + values.nextValue()) / 2; } else { - return missingValue; + return values.nextValue(); } } }, @@ -273,119 +225,93 @@ public enum MultiValueMode implements Writeable { */ MIN { @Override - protected long pick(SortedNumericDocValues values, long missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - return count > 0 ? 
values.valueAt(0) : missingValue; + protected long pick(SortedNumericDocValues values) throws IOException { + return values.nextValue(); } @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - long minValue = Long.MAX_VALUE; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - minValue = Math.min(minValue, values.valueAt(0)); - } - totalCount += count; + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + boolean hasValue = false; + long minValue = Long.MAX_VALUE; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + minValue = Math.min(minValue, values.nextValue()); + hasValue = true; } - return totalCount > 0 ? minValue : missingValue; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return hasValue ? minValue : missingValue; } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - int count = values.count(); - return count > 0 ? 
values.valueAt(0) : missingValue; + protected double pick(SortedNumericDoubleValues values) throws IOException { + return values.nextValue(); } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - double minValue = Double.MAX_VALUE; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - minValue = Math.min(minValue, values.valueAt(0)); - } - totalCount += count; + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + boolean hasValue = false; + double minValue = Double.POSITIVE_INFINITY; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + minValue = Math.min(minValue, values.nextValue()); + hasValue = true; } - return totalCount > 0 ? minValue : missingValue; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return hasValue ? minValue : missingValue; } @Override - protected BytesRef pick(SortedBinaryDocValues values, BytesRef missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - return count > 0 ? values.valueAt(0) : missingValue; + protected BytesRef pick(SortedBinaryDocValues values) throws IOException { + return values.nextValue(); } @Override - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - BytesRefBuilder value = null; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - final BytesRef innerValue = values.get(doc); - if (innerValue != null) { - if (value == null) { - builder.copyBytes(innerValue); - value = builder; - } else { - final BytesRef min = value.get().compareTo(innerValue) <= 0 ? 
value.get() : innerValue; - if (min == innerValue) { - value.copyBytes(min); - } + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + BytesRefBuilder value = null; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final BytesRef innerValue = values.binaryValue(); + if (value == null) { + builder.copyBytes(innerValue); + value = builder; + } else { + final BytesRef min = value.get().compareTo(innerValue) <= 0 ? value.get() : innerValue; + if (min == innerValue) { + value.copyBytes(min); } } } - return value == null ? null : value.get(); - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return value == null ? null : value.get(); } @Override - protected int pick(RandomAccessOrds values, int doc) { - values.setDocument(doc); - return values.cardinality() > 0 ? (int)values.ordAt(0) : -1; + protected int pick(SortedSetDocValues values) throws IOException { + return Math.toIntExact(values.nextOrd()); } @Override - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int ord = -1; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - final int innerOrd = values.getOrd(doc); - if (innerOrd != -1) { - ord = ord == -1 ? innerOrd : Math.min(ord, innerOrd); - } + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + int ord = Integer.MAX_VALUE; + boolean hasValue = false; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final int innerOrd = values.ordValue(); + ord = Math.min(ord, innerOrd); + hasValue = true; } - return ord; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return hasValue ? 
ord : -1; } @Override - protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - int count = values.count(); - double min = Double.MAX_VALUE; + protected double pick(UnsortedNumericDoubleValues values) throws IOException { + int count = values.docValueCount(); + double min = Double.POSITIVE_INFINITY; for (int index = 0; index < count; ++index) { - min = Math.min(values.valueAt(index), min); + min = Math.min(values.nextValue(), min); } - return count > 0 ? min : missingValue; + return min; } }, @@ -394,120 +320,114 @@ public enum MultiValueMode implements Writeable { */ MAX { @Override - protected long pick(SortedNumericDocValues values, long missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - return count > 0 ? values.valueAt(count - 1) : missingValue; - } - - @Override - protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - long maxValue = Long.MIN_VALUE; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - maxValue = Math.max(maxValue, values.valueAt(count - 1)); - } - totalCount += count; - } - return totalCount > 0 ? maxValue : missingValue; - } catch (IOException ioException) { - throw new RuntimeException(ioException); + protected long pick(SortedNumericDocValues values) throws IOException { + final int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextValue(); } + return values.nextValue(); } @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - return count > 0 ? 
values.valueAt(count - 1) : missingValue; - } - - @Override - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int totalCount = 0; - double maxValue = Double.MIN_VALUE; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - values.setDocument(doc); - final int count = values.count(); - if (count > 0) { - maxValue = Math.max(maxValue, values.valueAt(count - 1)); + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + boolean hasValue = false; + long maxValue = Long.MIN_VALUE; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextValue(); } - totalCount += count; + maxValue = Math.max(maxValue, values.nextValue()); + hasValue = true; } - return totalCount > 0 ? maxValue : missingValue; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return hasValue ? maxValue : missingValue; } @Override - protected BytesRef pick(SortedBinaryDocValues values, BytesRef missingValue, int doc) { - values.setDocument(doc); - final int count = values.count(); - return count > 0 ? 
values.valueAt(count - 1) : missingValue; + protected double pick(SortedNumericDoubleValues values) throws IOException { + final int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextValue(); + } + return values.nextValue(); } @Override - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - BytesRefBuilder value = null; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - final BytesRef innerValue = values.get(doc); - if (innerValue != null) { - if (value == null) { - builder.copyBytes(innerValue); - value = builder; - } else { - final BytesRef max = value.get().compareTo(innerValue) > 0 ? value.get() : innerValue; - if (max == innerValue) { - value.copyBytes(max); - } + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + boolean hasValue = false; + double maxValue = Double.NEGATIVE_INFINITY; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextValue(); + } + maxValue = Math.max(maxValue, values.nextValue()); + hasValue = true; + } + } + return hasValue ? 
maxValue : missingValue; + } + + @Override + protected BytesRef pick(SortedBinaryDocValues values) throws IOException { + int count = values.docValueCount(); + for (int i = 0; i < count - 1; ++i) { + values.nextValue(); + } + return values.nextValue(); + } + + @Override + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + BytesRefBuilder value = null; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + final BytesRef innerValue = values.binaryValue(); + if (value == null) { + builder.copyBytes(innerValue); + value = builder; + } else { + final BytesRef max = value.get().compareTo(innerValue) > 0 ? value.get() : innerValue; + if (max == innerValue) { + value.copyBytes(max); } } } - return value == null ? null : value.get(); - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return value == null ? null : value.get(); } @Override - protected int pick(RandomAccessOrds values, int doc) { - values.setDocument(doc); - final int count = values.cardinality(); - return count > 0 ? 
(int)values.ordAt(count - 1) : -1; + protected int pick(SortedSetDocValues values) throws IOException { + long maxOrd = -1; + for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + maxOrd = ord; + } + return Math.toIntExact(maxOrd); } @Override - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) { - try { - int ord = -1; - for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { - final int innerOrd = values.getOrd(doc); - if (innerOrd != -1) { - ord = Math.max(ord, innerOrd); - } + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { + int ord = -1; + for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) { + if (values.advanceExact(doc)) { + ord = Math.max(ord, values.ordValue()); } - return ord; - } catch (IOException ioException) { - throw new RuntimeException(ioException); } + return ord; } @Override - protected double pick(UnsortedNumericDoubleValues values, double missingValue, int doc) { - values.setDocument(doc); - int count = values.count(); - double max = Double.MIN_VALUE; + protected double pick(UnsortedNumericDoubleValues values) throws IOException { + int count = values.docValueCount(); + double max = Double.NEGATIVE_INFINITY; for (int index = 0; index < count; ++index) { - max = Math.max(values.valueAt(index), max); + max = Math.max(values.nextValue(), max); } - return count > 0 ? 
max : missingValue; + return max; } }; @@ -534,32 +454,51 @@ public enum MultiValueMode implements Writeable { public NumericDocValues select(final SortedNumericDocValues values, final long missingValue) { final NumericDocValues singleton = DocValues.unwrapSingleton(values); if (singleton != null) { - final Bits docsWithField = DocValues.unwrapSingletonBits(values); - if (docsWithField == null || missingValue == 0) { - return singleton; - } else { - return new NumericDocValues() { - @Override - public long get(int docID) { - final long value = singleton.get(docID); - if (value == 0 && docsWithField.get(docID) == false) { - return missingValue; - } - return value; - } - }; - } - } else { - return new NumericDocValues() { + return new AbstractNumericDocValues() { + + private boolean hasValue; + @Override - public long get(int docID) { - return pick(values, missingValue, docID); + public boolean advanceExact(int target) throws IOException { + hasValue = singleton.advanceExact(target); + return true; + } + + @Override + public int docID() { + return singleton.docID(); + } + + @Override + public long longValue() throws IOException { + return hasValue ? singleton.longValue() : missingValue; + } + }; + } else { + return new AbstractNumericDocValues() { + + private boolean hasValue; + + @Override + public boolean advanceExact(int target) throws IOException { + hasValue = values.advanceExact(target); + return true; + } + + @Override + public int docID() { + return values.docID(); + } + + @Override + public long longValue() throws IOException { + return hasValue ? 
pick(values) : missingValue; } }; } } - protected long pick(SortedNumericDocValues values, long missingValue, int doc) { + protected long pick(SortedNumericDocValues values) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -580,38 +519,47 @@ public enum MultiValueMode implements Writeable { return select(DocValues.emptySortedNumeric(maxDoc), missingValue); } - return new NumericDocValues() { + return new AbstractNumericDocValues() { - int lastSeenRootDoc = 0; + int lastSeenRootDoc = -1; long lastEmittedValue = missingValue; @Override - public long get(int rootDoc) { + public boolean advanceExact(int rootDoc) throws IOException { assert rootDocs.get(rootDoc) : "can only sort root documents"; assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs"; if (rootDoc == lastSeenRootDoc) { - return lastEmittedValue; + return true; + } else if (rootDoc == 0) { + lastEmittedValue = missingValue; + return true; + } + final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); + final int firstNestedDoc; + if (innerDocs.docID() > prevRootDoc) { + firstNestedDoc = innerDocs.docID(); + } else { + firstNestedDoc = innerDocs.advance(prevRootDoc + 1); } - try { - final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); - final int firstNestedDoc; - if (innerDocs.docID() > prevRootDoc) { - firstNestedDoc = innerDocs.docID(); - } else { - firstNestedDoc = innerDocs.advance(prevRootDoc + 1); - } - lastSeenRootDoc = rootDoc; - lastEmittedValue = pick(values, missingValue, innerDocs, firstNestedDoc, rootDoc); - return lastEmittedValue; - } catch (IOException e) { - throw new RuntimeException(e); - } + lastSeenRootDoc = rootDoc; + lastEmittedValue = pick(values, missingValue, innerDocs, firstNestedDoc, rootDoc); + return true; + } + + @Override + public int docID() { + return lastSeenRootDoc; + } + + @Override + public long longValue() { + return lastEmittedValue; } }; } - protected long 
pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { + protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -625,32 +573,42 @@ public enum MultiValueMode implements Writeable { public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue) { final NumericDoubleValues singleton = FieldData.unwrapSingleton(values); if (singleton != null) { - final Bits docsWithField = FieldData.unwrapSingletonBits(values); - if (docsWithField == null || missingValue == 0) { - return singleton; - } else { - return new NumericDoubleValues() { - @Override - public double get(int docID) { - final double value = singleton.get(docID); - if (value == 0 && docsWithField.get(docID) == false) { - return missingValue; - } - return value; - } - }; - } + return new NumericDoubleValues() { + + private boolean hasValue; + + @Override + public boolean advanceExact(int doc) throws IOException { + hasValue = singleton.advanceExact(doc); + return true; + } + + @Override + public double doubleValue() throws IOException { + return hasValue ? singleton.doubleValue() : missingValue; + } + + }; } else { return new NumericDoubleValues() { + + private boolean hasValue; + @Override - public double get(int docID) { - return pick(values, missingValue, docID); + public boolean advanceExact(int target) throws IOException { + hasValue = values.advanceExact(target); + return true; + } + + @Override + public double doubleValue() throws IOException { + return hasValue ? 
pick(values) : missingValue; } }; } } - protected double pick(SortedNumericDoubleValues values, double missingValue, int doc) { + protected double pick(SortedNumericDoubleValues values) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -668,7 +626,7 @@ public enum MultiValueMode implements Writeable { */ public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet rootDocs, final DocIdSetIterator innerDocs, int maxDoc) throws IOException { if (rootDocs == null || innerDocs == null) { - return select(FieldData.emptySortedNumericDoubles(maxDoc), missingValue); + return select(FieldData.emptySortedNumericDoubles(), missingValue); } return new NumericDoubleValues() { @@ -677,32 +635,33 @@ public enum MultiValueMode implements Writeable { double lastEmittedValue = missingValue; @Override - public double get(int rootDoc) { + public boolean advanceExact(int rootDoc) throws IOException { assert rootDocs.get(rootDoc) : "can only sort root documents"; assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs"; if (rootDoc == lastSeenRootDoc) { - return lastEmittedValue; + return true; + } + final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); + final int firstNestedDoc; + if (innerDocs.docID() > prevRootDoc) { + firstNestedDoc = innerDocs.docID(); + } else { + firstNestedDoc = innerDocs.advance(prevRootDoc + 1); } - try { - final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); - final int firstNestedDoc; - if (innerDocs.docID() > prevRootDoc) { - firstNestedDoc = innerDocs.docID(); - } else { - firstNestedDoc = innerDocs.advance(prevRootDoc + 1); - } - lastSeenRootDoc = rootDoc; - lastEmittedValue = pick(values, missingValue, innerDocs, firstNestedDoc, rootDoc); - return lastEmittedValue; - } catch (IOException e) { - throw new RuntimeException(e); - } + lastSeenRootDoc = rootDoc; + lastEmittedValue = pick(values, missingValue, innerDocs, 
firstNestedDoc, rootDoc); + return true; + } + + @Override + public double doubleValue() throws IOException { + return lastEmittedValue; } }; } - protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) { + protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -716,32 +675,49 @@ public enum MultiValueMode implements Writeable { public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue) { final BinaryDocValues singleton = FieldData.unwrapSingleton(values); if (singleton != null) { - final Bits docsWithField = FieldData.unwrapSingletonBits(values); - if (docsWithField == null) { + if (missingValue == null) { return singleton; - } else { - return new BinaryDocValues() { - @Override - public BytesRef get(int docID) { - final BytesRef value = singleton.get(docID); - if (value.length == 0 && docsWithField.get(docID) == false) { - return missingValue; - } - return value; - } - }; } - } else { - return new BinaryDocValues() { + return new AbstractBinaryDocValues() { + + private boolean hasValue; + @Override - public BytesRef get(int docID) { - return pick(values, missingValue, docID); + public boolean advanceExact(int target) throws IOException { + hasValue = singleton.advanceExact(target); + return true; + } + + @Override + public BytesRef binaryValue() throws IOException { + return hasValue ? 
singleton.binaryValue() : missingValue; + } + }; + } else { + return new AbstractBinaryDocValues() { + + private BytesRef value; + + @Override + public boolean advanceExact(int target) throws IOException { + if (values.advanceExact(target)) { + value = pick(values); + return true; + } else { + value = missingValue; + return missingValue != null; + } + } + + @Override + public BytesRef binaryValue() throws IOException { + return value; } }; } } - protected BytesRef pick(SortedBinaryDocValues values, BytesRef missingValue, int doc) { + protected BytesRef pick(SortedBinaryDocValues values) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -759,11 +735,11 @@ public enum MultiValueMode implements Writeable { */ public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue, final BitSet rootDocs, final DocIdSetIterator innerDocs, int maxDoc) throws IOException { if (rootDocs == null || innerDocs == null) { - return select(FieldData.emptySortedBinary(maxDoc), missingValue); + return select(FieldData.emptySortedBinary(), missingValue); } final BinaryDocValues selectedValues = select(values, null); - return new BinaryDocValues() { + return new AbstractBinaryDocValues() { final BytesRefBuilder builder = new BytesRefBuilder(); @@ -771,36 +747,37 @@ public enum MultiValueMode implements Writeable { BytesRef lastEmittedValue = missingValue; @Override - public BytesRef get(int rootDoc) { + public boolean advanceExact(int rootDoc) throws IOException { assert rootDocs.get(rootDoc) : "can only sort root documents"; assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root docs"; if (rootDoc == lastSeenRootDoc) { - return lastEmittedValue; + return true; } - try { - final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); - final int firstNestedDoc; - if (innerDocs.docID() > prevRootDoc) { - firstNestedDoc = innerDocs.docID(); - } else { - firstNestedDoc = 
innerDocs.advance(prevRootDoc + 1); - } - - lastSeenRootDoc = rootDoc; - lastEmittedValue = pick(selectedValues, builder, innerDocs, firstNestedDoc, rootDoc); - if (lastEmittedValue == null) { - lastEmittedValue = missingValue; - } - return lastEmittedValue; - } catch (IOException e) { - throw new RuntimeException(e); + final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); + final int firstNestedDoc; + if (innerDocs.docID() > prevRootDoc) { + firstNestedDoc = innerDocs.docID(); + } else { + firstNestedDoc = innerDocs.advance(prevRootDoc + 1); } + + lastSeenRootDoc = rootDoc; + lastEmittedValue = pick(selectedValues, builder, innerDocs, firstNestedDoc, rootDoc); + if (lastEmittedValue == null) { + lastEmittedValue = missingValue; + } + return true; + } + + @Override + public BytesRef binaryValue() throws IOException { + return lastEmittedValue; } }; } - protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) { + protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -810,7 +787,7 @@ public enum MultiValueMode implements Writeable { * * Allowed Modes: MIN, MAX */ - public SortedDocValues select(final RandomAccessOrds values) { + public SortedDocValues select(final SortedSetDocValues values) { if (values.getValueCount() >= Integer.MAX_VALUE) { throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE-1) + " unique terms are unsupported"); } @@ -819,26 +796,46 @@ public enum MultiValueMode implements Writeable { if (singleton != null) { return singleton; } else { - return new SortedDocValues() { + return new AbstractSortedDocValues() { + + int ord; + @Override - public int getOrd(int docID) { - return pick(values, docID); + public boolean advanceExact(int target) throws IOException { + if 
(values.advanceExact(target)) { + ord = pick(values); + return true; + } else { + ord = -1; + return false; + } } @Override - public BytesRef lookupOrd(int ord) { + public int docID() { + return values.docID(); + } + + @Override + public int ordValue() { + assert ord != -1; + return ord; + } + + @Override + public BytesRef lookupOrd(int ord) throws IOException { return values.lookupOrd(ord); } @Override public int getValueCount() { - return (int)values.getValueCount(); + return (int) values.getValueCount(); } }; } } - protected int pick(RandomAccessOrds values, int doc) { + protected int pick(SortedSetDocValues values) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -853,19 +850,20 @@ public enum MultiValueMode implements Writeable { * NOTE: Calling the returned instance on docs that are not root docs is illegal * The returned instance can only be evaluate the current and upcoming docs */ - public SortedDocValues select(final RandomAccessOrds values, final BitSet rootDocs, final DocIdSetIterator innerDocs) throws IOException { + public SortedDocValues select(final SortedSetDocValues values, final BitSet rootDocs, final DocIdSetIterator innerDocs) throws IOException { if (rootDocs == null || innerDocs == null) { return select(DocValues.emptySortedSet()); } final SortedDocValues selectedValues = select(values); - return new SortedDocValues() { + return new AbstractSortedDocValues() { + int docID = -1; int lastSeenRootDoc = 0; int lastEmittedOrd = -1; @Override - public BytesRef lookupOrd(int ord) { + public BytesRef lookupOrd(int ord) throws IOException { return selectedValues.lookupOrd(ord); } @@ -875,32 +873,39 @@ public enum MultiValueMode implements Writeable { } @Override - public int getOrd(int rootDoc) { + public boolean advanceExact(int rootDoc) throws IOException { assert rootDocs.get(rootDoc) : "can only sort root documents"; assert rootDoc >= lastSeenRootDoc : "can only evaluate current and upcoming root 
docs"; if (rootDoc == lastSeenRootDoc) { - return lastEmittedOrd; + return lastEmittedOrd != -1; } - try { - final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); - final int firstNestedDoc; - if (innerDocs.docID() > prevRootDoc) { - firstNestedDoc = innerDocs.docID(); - } else { - firstNestedDoc = innerDocs.advance(prevRootDoc + 1); - } - - lastSeenRootDoc = rootDoc; - return lastEmittedOrd = pick(selectedValues, innerDocs, firstNestedDoc, rootDoc); - } catch (IOException e) { - throw new RuntimeException(e); + final int prevRootDoc = rootDocs.prevSetBit(rootDoc - 1); + final int firstNestedDoc; + if (innerDocs.docID() > prevRootDoc) { + firstNestedDoc = innerDocs.docID(); + } else { + firstNestedDoc = innerDocs.advance(prevRootDoc + 1); } + + docID = lastSeenRootDoc = rootDoc; + lastEmittedOrd = pick(selectedValues, innerDocs, firstNestedDoc, rootDoc); + return lastEmittedOrd != -1; + } + + @Override + public int docID() { + return docID; + } + + @Override + public int ordValue() { + return lastEmittedOrd; } }; } - protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) { + protected int pick(SortedDocValues values, DocIdSetIterator docItr, int startDoc, int endDoc) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } @@ -913,24 +918,32 @@ public enum MultiValueMode implements Writeable { */ public NumericDoubleValues select(final UnsortedNumericDoubleValues values, final double missingValue) { return new NumericDoubleValues() { + private boolean hasValue; + @Override - public double get(int docID) { - return pick(values, missingValue, docID); + public boolean advanceExact(int doc) throws IOException { + hasValue = values.advanceExact(doc); + return true; + } + @Override + public double doubleValue() throws IOException { + return hasValue ? 
pick(values) : missingValue; } }; } - protected double pick(UnsortedNumericDoubleValues values, final double missingValue, int doc) { + protected double pick(UnsortedNumericDoubleValues values) throws IOException { throw new IllegalArgumentException("Unsupported sort mode: " + this); } /** * Interface allowing custom value generators to be used in MultiValueMode. */ + // TODO: why do we need it??? public interface UnsortedNumericDoubleValues { - int count(); - void setDocument(int docId); - double valueAt(int index); + boolean advanceExact(int doc) throws IOException; + int docValueCount() throws IOException; + double nextValue() throws IOException; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java index 5aa55c771b2..37a443e9bab 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/children/ParentToChildrenAggregator.java @@ -57,20 +57,25 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { private final Weight parentFilter; private final ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource; - // Maybe use PagedGrowableWriter? This will be less wasteful than LongArray, but then we don't have the reuse feature of BigArrays. - // Also if we know the highest possible value that a parent agg will create then we store multiple values into one slot + // Maybe use PagedGrowableWriter? This will be less wasteful than LongArray, + // but then we don't have the reuse feature of BigArrays. 
+ // Also if we know the highest possible value that a parent agg will create + // then we store multiple values into one slot private final LongArray parentOrdToBuckets; // Only pay the extra storage price if the a parentOrd has multiple buckets - // Most of the times a parent doesn't have multiple buckets, since there is only one document per parent ord, - // only in the case of terms agg if a parent doc has multiple terms per field this is needed: + // Most of the times a parent doesn't have multiple buckets, since there is + // only one document per parent ord, + // only in the case of terms agg if a parent doc has multiple terms per + // field this is needed: private final LongObjectPagedHashMap parentOrdToOtherBuckets; private boolean multipleBucketsPerParentOrd = false; - public ParentToChildrenAggregator(String name, AggregatorFactories factories, SearchContext context, - Aggregator parent, String parentType, Query childFilter, Query parentFilter, - ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource, - long maxOrd, List pipelineAggregators, Map metaData) throws IOException { + public ParentToChildrenAggregator(String name, AggregatorFactories factories, + SearchContext context, Aggregator parent, String parentType, Query childFilter, + Query parentFilter, ValuesSource.Bytes.WithOrdinals.ParentChild valuesSource, + long maxOrd, List pipelineAggregators, Map metaData) + throws IOException { super(name, factories, context, parent, pipelineAggregators, metaData); this.parentType = parentType; // these two filters are cached in the parser @@ -84,13 +89,14 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { @Override public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { - return new InternalChildren(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), pipelineAggregators(), - metaData()); + return new InternalChildren(name, bucketDocCount(owningBucketOrdinal), + 
bucketAggregations(owningBucketOrdinal), pipelineAggregators(), metaData()); } @Override public InternalAggregation buildEmptyAggregation() { - return new InternalChildren(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData()); + return new InternalChildren(name, 0, buildEmptySubAggregations(), pipelineAggregators(), + metaData()); } @Override @@ -108,8 +114,8 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { @Override public void collect(int docId, long bucket) throws IOException { - if (parentDocs.get(docId)) { - long globalOrdinal = globalOrdinals.getOrd(docId); + if (parentDocs.get(docId) && globalOrdinals.advanceExact(docId)) { + long globalOrdinal = globalOrdinals.ordValue(); if (globalOrdinal != -1) { if (parentOrdToBuckets.get(globalOrdinal) == -1) { parentOrdToBuckets.set(globalOrdinal, bucket); @@ -120,7 +126,7 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { bucketOrds[bucketOrds.length - 1] = bucket; parentOrdToOtherBuckets.put(globalOrdinal, bucketOrds); } else { - parentOrdToOtherBuckets.put(globalOrdinal, new long[]{bucket}); + parentOrdToOtherBuckets.put(globalOrdinal, new long[] { bucket }); } multipleBucketsPerParentOrd = true; } @@ -141,18 +147,21 @@ public class ParentToChildrenAggregator extends SingleBucketAggregator { DocIdSetIterator childDocsIter = childDocsScorer.iterator(); final LeafBucketCollector sub = collectableSubAggregators.getLeafCollector(ctx); - final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, ctx); + final SortedDocValues globalOrdinals = valuesSource.globalOrdinalsValues(parentType, + ctx); // Set the scorer, since we now replay only the child docIds - sub.setScorer(new ConstantScoreScorer(null, 1f,childDocsIter)); + sub.setScorer(new ConstantScoreScorer(null, 1f, childDocsIter)); final Bits liveDocs = ctx.reader().getLiveDocs(); - for (int docId = childDocsIter.nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = 
childDocsIter.nextDoc()) { + for (int docId = childDocsIter + .nextDoc(); docId != DocIdSetIterator.NO_MORE_DOCS; docId = childDocsIter + .nextDoc()) { if (liveDocs != null && liveDocs.get(docId) == false) { continue; } - long globalOrdinal = globalOrdinals.getOrd(docId); - if (globalOrdinal != -1) { + if (globalOrdinals.advanceExact(docId)) { + long globalOrdinal = globalOrdinals.ordValue(); long bucketOrd = parentOrdToBuckets.get(globalOrdinal); if (bucketOrd != -1) { collectBucket(sub, docId, bucketOrd); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java index 602c3a81c66..1c602474465 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/geogrid/GeoGridAggregationBuilder.java @@ -28,10 +28,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; -import org.elasticsearch.index.fielddata.SortingNumericDocValues; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; import org.elasticsearch.search.aggregations.AggregatorFactory; @@ -187,7 +187,7 @@ public class GeoGridAggregationBuilder extends ValuesSourceAggregationBuilder= previousRounded; - if (rounded == previousRounded) { - continue; + long previousRounded = Long.MIN_VALUE; + for (int i = 0; i < 
valuesCount; ++i) { + long value = values.nextValue(); + long rounded = rounding.round(value - offset) + offset; + assert rounded >= previousRounded; + if (rounded == previousRounded) { + continue; + } + long bucketOrd = bucketOrds.add(rounded); + if (bucketOrd < 0) { // already seen + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { + collectBucket(sub, doc, bucketOrd); + } + previousRounded = rounded; } - long bucketOrd = bucketOrds.add(rounded); - if (bucketOrd < 0) { // already seen - bucketOrd = -1 - bucketOrd; - collectExistingBucket(sub, doc, bucketOrd); - } else { - collectBucket(sub, doc, bucketOrd); - } - previousRounded = rounded; } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java index 4b547989d8b..a3a038cfa3c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/HistogramAggregator.java @@ -101,25 +101,26 @@ class HistogramAggregator extends BucketsAggregator { @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - values.setDocument(doc); - final int valuesCount = values.count(); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); - double previousKey = Double.NEGATIVE_INFINITY; - for (int i = 0; i < valuesCount; ++i) { - double value = values.valueAt(i); - double key = Math.floor((value - offset) / interval); - assert key >= previousKey; - if (key == previousKey) { - continue; + double previousKey = Double.NEGATIVE_INFINITY; + for (int i = 0; i < valuesCount; ++i) { + double value = values.nextValue(); + double key = Math.floor((value - offset) / interval); + assert key >= previousKey; + if (key == previousKey) { + continue; + } + long 
bucketOrd = bucketOrds.add(Double.doubleToLongBits(key)); + if (bucketOrd < 0) { // already seen + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { + collectBucket(sub, doc, bucketOrd); + } + previousKey = key; } - long bucketOrd = bucketOrds.add(Double.doubleToLongBits(key)); - if (bucketOrd < 0) { // already seen - bucketOrd = -1 - bucketOrd; - collectExistingBucket(sub, doc, bucketOrd); - } else { - collectBucket(sub, doc, bucketOrd); - } - previousKey = key; } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java index 873eef2f715..9151ebc985a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregator.java @@ -127,7 +127,7 @@ public final class BinaryRangeAggregator extends BucketsAggregator { final LeafBucketCollector sub; SortedSetRangeLeafCollector(SortedSetDocValues values, - Range[] ranges, LeafBucketCollector sub) { + Range[] ranges, LeafBucketCollector sub) throws IOException { super(sub, values); for (int i = 1; i < ranges.length; ++i) { if (RANGE_COMPARATOR.compare(ranges[i-1], ranges[i]) > 0) { @@ -167,10 +167,13 @@ public final class BinaryRangeAggregator extends BucketsAggregator { @Override public void collect(int doc, long bucket) throws IOException { - values.setDocument(doc); - int lo = 0; - for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { - lo = collect(doc, ord, bucket, lo); + if (values.advanceExact(doc)) { + int lo = 0; + for (long ord = values + .nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values + .nextOrd()) { + lo = collect(doc, ord, bucket, lo); + } } } @@ -259,11 +262,12 @@ public final class BinaryRangeAggregator extends 
BucketsAggregator { @Override public void collect(int doc, long bucket) throws IOException { - values.setDocument(doc); - final int valuesCount = values.count(); - for (int i = 0, lo = 0; i < valuesCount; ++i) { - final BytesRef value = values.valueAt(i); - lo = collect(doc, value, bucket, lo); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); + for (int i = 0, lo = 0; i < valuesCount; ++i) { + final BytesRef value = values.nextValue(); + lo = collect(doc, value, bucket, lo); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java index a2e55c18fb7..45128030759 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/range/RangeAggregator.java @@ -236,11 +236,12 @@ public class RangeAggregator extends BucketsAggregator { return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { - values.setDocument(doc); - final int valuesCount = values.count(); - for (int i = 0, lo = 0; i < valuesCount; ++i) { - final double value = values.valueAt(i); - lo = collect(doc, value, bucket, lo); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); + for (int i = 0, lo = 0; i < valuesCount; ++i) { + final double value = values.nextValue(); + lo = collect(doc, value, bucket, lo); + } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java index 33532a9f438..472881b0084 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedBytesHashSamplerAggregator.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.index.fielddata.AbstractNumericDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -97,20 +98,33 @@ public class DiversifiedBytesHashSamplerAggregator extends SamplerAggregator { } catch (IOException e) { throw new ElasticsearchException("Error reading values", e); } - return new NumericDocValues() { - @Override - public long get(int doc) { + return new AbstractNumericDocValues() { - values.setDocument(doc); - final int valuesCount = values.count(); - if (valuesCount > 1) { - throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); + private int docID = -1; + + @Override + public boolean advanceExact(int target) throws IOException { + docID = target; + if (values.advanceExact(target)) { + if (values.docValueCount() > 1) { + throw new IllegalArgumentException( + "Sample diversifying key must be a single valued-field"); + } + return true; + } else { + return false; } - if (valuesCount == 1) { - final BytesRef bytes = values.valueAt(0); - return bytes.hashCode(); - } - return 0; + } + + @Override + public int docID() { + return docID; + } + + @Override + public long longValue() throws IOException { + final BytesRef bytes = values.nextValue(); + return bytes.hashCode(); } }; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java index 
d16e798b961..935da11e132 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedMapSamplerAggregator.java @@ -28,6 +28,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BytesRefHash; +import org.elasticsearch.index.fielddata.AbstractNumericDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -103,25 +104,38 @@ public class DiversifiedMapSamplerAggregator extends SamplerAggregator { } catch (IOException e) { throw new ElasticsearchException("Error reading values", e); } - return new NumericDocValues() { + return new AbstractNumericDocValues() { + + private int docID = -1; + @Override - public long get(int doc) { - - values.setDocument(doc); - final int valuesCount = values.count(); - if (valuesCount > 1) { - throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); - } - if (valuesCount == 1) { - final BytesRef bytes = values.valueAt(0); - - long bucketOrdinal = bucketOrds.add(bytes); - if (bucketOrdinal < 0) { // already seen - bucketOrdinal = -1 - bucketOrdinal; + public boolean advanceExact(int target) throws IOException { + docID = target; + if (values.advanceExact(target)) { + if (values.docValueCount() > 1) { + throw new IllegalArgumentException( + "Sample diversifying key must be a single valued-field"); } - return bucketOrdinal; + return true; + } else { + return false; } - return 0; + } + + @Override + public int docID() { + return docID; + } + + @Override + public long longValue() throws IOException { + final BytesRef bytes = values.nextValue(); + + long bucketOrdinal = bucketOrds.add(bytes); + 
if (bucketOrdinal < 0) { // already seen + bucketOrdinal = -1 - bucketOrdinal; + } + return bucketOrdinal; } }; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java index c33a1a55532..8ce29c14bb5 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedNumericSamplerAggregator.java @@ -26,6 +26,7 @@ import org.apache.lucene.search.DiversifiedTopDocsCollector; import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.index.fielddata.AbstractNumericDocValues; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector; @@ -88,18 +89,29 @@ public class DiversifiedNumericSamplerAggregator extends SamplerAggregator { } catch (IOException e) { throw new ElasticsearchException("Error reading values", e); } - return new NumericDocValues() { + return new AbstractNumericDocValues() { + @Override - public long get(int doc) { - values.setDocument(doc); - final int valuesCount = values.count(); - if (valuesCount > 1) { - throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); + public boolean advanceExact(int target) throws IOException { + if (values.advanceExact(target)) { + if (values.docValueCount() > 1) { + throw new IllegalArgumentException( + "Sample diversifying key must be a single valued-field"); + } + return true; + } else { + return false; } - if (valuesCount == 1) { - return values.valueAt(0); - } - return 
Long.MIN_VALUE; + } + + @Override + public int docID() { + return values.docID(); + } + + @Override + public long longValue() throws IOException { + return values.nextValue(); } }; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java index a5d027debe1..74f7a16d621 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/sampler/DiversifiedOrdinalsSamplerAggregator.java @@ -22,11 +22,12 @@ package org.elasticsearch.search.aggregations.bucket.sampler; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DiversifiedTopDocsCollector; import org.apache.lucene.search.DiversifiedTopDocsCollector.ScoreDocKey; import org.apache.lucene.search.TopDocsCollector; +import org.elasticsearch.index.fielddata.AbstractNumericDocValues; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector; @@ -84,29 +85,55 @@ public class DiversifiedOrdinalsSamplerAggregator extends SamplerAggregator { @Override protected NumericDocValues getKeys(LeafReaderContext context) { - final RandomAccessOrds globalOrds = valuesSource.globalOrdinalsValues(context); + final SortedSetDocValues globalOrds = valuesSource.globalOrdinalsValues(context); final SortedDocValues singleValues = DocValues.unwrapSingleton(globalOrds); if (singleValues != null) { - return new 
NumericDocValues() { + return new AbstractNumericDocValues() { + @Override - public long get(int doc) { - return singleValues.getOrd(doc); + public boolean advanceExact(int target) throws IOException { + return singleValues.advanceExact(target); + } + + @Override + public int docID() { + return singleValues.docID(); + } + + @Override + public long longValue() throws IOException { + return singleValues.ordValue(); } }; } - return new NumericDocValues() { + return new AbstractNumericDocValues() { + + long value; + @Override - public long get(int doc) { - globalOrds.setDocument(doc); - final long valuesCount = globalOrds.cardinality(); - if (valuesCount > 1) { - throw new IllegalArgumentException("Sample diversifying key must be a single valued-field"); + public boolean advanceExact(int target) throws IOException { + if (globalOrds.advanceExact(target)) { + value = globalOrds.nextOrd(); + // Check there isn't a second value for this + // document + if (globalOrds.nextOrd() != SortedSetDocValues.NO_MORE_ORDS) { + throw new IllegalArgumentException( + "Sample diversifying key must be a single valued-field"); + } + return true; + } else { + return false; } - if (valuesCount == 1) { - long result = globalOrds.ordAt(0); - return result; - } - return Long.MIN_VALUE; + } + + @Override + public int docID() { + return globalOrds.docID(); + } + + @Override + public long longValue() throws IOException { + return value; } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java index bf1714b14cc..98effdcfd54 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/significant/GlobalOrdinalsSignificantTermsAggregator.java @@ -20,6 
+20,7 @@ package org.elasticsearch.search.aggregations.bucket.significant; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.LongHash; @@ -177,16 +178,17 @@ public class GlobalOrdinalsSignificantTermsAggregator extends GlobalOrdinalsStri public void collect(int doc, long bucket) throws IOException { assert bucket == 0; numCollectedDocs++; - globalOrds.setDocument(doc); - final int numOrds = globalOrds.cardinality(); - for (int i = 0; i < numOrds; i++) { - final long globalOrd = globalOrds.ordAt(i); - long bucketOrd = bucketOrds.add(globalOrd); - if (bucketOrd < 0) { - bucketOrd = -1 - bucketOrd; - collectExistingBucket(sub, doc, bucketOrd); - } else { - collectBucket(sub, doc, bucketOrd); + if (globalOrds.advanceExact(doc)) { + for (long globalOrd = globalOrds.nextOrd(); + globalOrd != SortedSetDocValues.NO_MORE_ORDS; + globalOrd = globalOrds.nextOrd()) { + long bucketOrd = bucketOrds.add(globalOrd); + if (bucketOrd < 0) { + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { + collectBucket(sub, doc, bucketOrd); + } } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java index 8e9d779cd08..5ef96d19257 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/GlobalOrdinalsStringTermsAggregator.java @@ -21,8 +21,8 @@ package org.elasticsearch.search.aggregations.bucket.terms; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.LeafReaderContext; -import 
org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongBitSet; @@ -31,7 +31,7 @@ import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.IntArray; import org.elasticsearch.common.util.LongHash; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalMapping; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; @@ -67,7 +67,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr // especially if this agg is on a second layer or deeper. protected LongBitSet acceptedGlobalOrdinals; - protected RandomAccessOrds globalOrds; + protected SortedSetDocValues globalOrds; public GlobalOrdinalsStringTermsAggregator(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource, Terms.Order order, DocValueFormat format, BucketCountThresholds bucketCountThresholds, @@ -101,7 +101,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr return newCollector(globalOrds, sub); } - protected LeafBucketCollector newCollector(final RandomAccessOrds ords, final LeafBucketCollector sub) { + protected LeafBucketCollector newCollector(final SortedSetDocValues ords, + final LeafBucketCollector sub) { grow(ords.getValueCount()); final SortedDocValues singleValues = DocValues.unwrapSingleton(ords); if (singleValues != null) { @@ -109,8 +110,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - final int 
ord = singleValues.getOrd(doc); - if (ord >= 0) { + if (singleValues.advanceExact(doc)) { + final int ord = singleValues.ordValue(); collectExistingBucket(sub, doc, ord); } } @@ -120,11 +121,12 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - ords.setDocument(doc); - final int numOrds = ords.cardinality(); - for (int i = 0; i < numOrds; i++) { - final long globalOrd = ords.ordAt(i); - collectExistingBucket(sub, doc, globalOrd); + if (ords.advanceExact(doc)) { + for (long globalOrd = ords.nextOrd(); + globalOrd != SortedSetDocValues.NO_MORE_ORDS; + globalOrd = ords.nextOrd()) { + collectExistingBucket(sub, doc, globalOrd); + } } } }; @@ -270,14 +272,15 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr } @Override - protected LeafBucketCollector newCollector(final RandomAccessOrds ords, final LeafBucketCollector sub) { + protected LeafBucketCollector newCollector(final SortedSetDocValues ords, + final LeafBucketCollector sub) { final SortedDocValues singleValues = DocValues.unwrapSingleton(ords); if (singleValues != null) { return new LeafBucketCollectorBase(sub, ords) { @Override public void collect(int doc, long bucket) throws IOException { - final int globalOrd = singleValues.getOrd(doc); - if (globalOrd >= 0) { + if (singleValues.advanceExact(doc)) { + final int globalOrd = singleValues.ordValue(); long bucketOrd = bucketOrds.add(globalOrd); if (bucketOrd < 0) { bucketOrd = -1 - bucketOrd; @@ -292,16 +295,17 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr return new LeafBucketCollectorBase(sub, ords) { @Override public void collect(int doc, long bucket) throws IOException { - ords.setDocument(doc); - final int numOrds = ords.cardinality(); - for (int i = 0; i < numOrds; i++) { - final long globalOrd = ords.ordAt(i); - long bucketOrd = bucketOrds.add(globalOrd); - if 
(bucketOrd < 0) { - bucketOrd = -1 - bucketOrd; - collectExistingBucket(sub, doc, bucketOrd); - } else { - collectBucket(sub, doc, bucketOrd); + if (ords.advanceExact(doc)) { + for (long globalOrd = ords.nextOrd(); + globalOrd != SortedSetDocValues.NO_MORE_ORDS; + globalOrd = ords.nextOrd()) { + long bucketOrd = bucketOrds.add(globalOrd); + if (bucketOrd < 0) { + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { + collectBucket(sub, doc, bucketOrd); + } } } } @@ -330,7 +334,7 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr private IntArray segmentDocCounts; - private RandomAccessOrds segmentOrds; + private SortedSetDocValues segmentOrds; public LowCardinality(String name, AggregatorFactories factories, ValuesSource.Bytes.WithOrdinals valuesSource, Terms.Order order, DocValueFormat format, @@ -345,7 +349,8 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr // bucketOrd is ord + 1 to avoid a branch to deal with the missing ord @Override - protected LeafBucketCollector newCollector(final RandomAccessOrds ords, LeafBucketCollector sub) { + protected LeafBucketCollector newCollector(final SortedSetDocValues ords, + LeafBucketCollector sub) { segmentDocCounts = context.bigArrays().grow(segmentDocCounts, 1 + ords.getValueCount()); assert sub == LeafBucketCollector.NO_OP_COLLECTOR; final SortedDocValues singleValues = DocValues.unwrapSingleton(ords); @@ -354,8 +359,10 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - final int ord = singleValues.getOrd(doc); - segmentDocCounts.increment(ord + 1, 1); + if (singleValues.advanceExact(doc)) { + final int ord = singleValues.ordValue(); + segmentDocCounts.increment(ord + 1, 1); + } } }; } else { @@ -363,11 +370,12 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr 
@Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - ords.setDocument(doc); - final int numOrds = ords.cardinality(); - for (int i = 0; i < numOrds; i++) { - final long segmentOrd = ords.ordAt(i); - segmentDocCounts.increment(segmentOrd + 1, 1); + if (ords.advanceExact(doc)) { + for (long segmentOrd = ords.nextOrd(); + segmentOrd != SortedSetDocValues.NO_MORE_ORDS; + segmentOrd = ords.nextOrd()) { + segmentDocCounts.increment(segmentOrd + 1, 1); + } } } }; @@ -422,15 +430,12 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr } } - private static final class FilteredOrdinals extends AbstractRandomAccessOrds { + private static final class FilteredOrdinals extends AbstractSortedSetDocValues { - private final RandomAccessOrds inner; + private final SortedSetDocValues inner; private final LongBitSet accepted; - private int cardinality; - private long[] ords = new long[0]; - - private FilteredOrdinals(RandomAccessOrds inner, LongBitSet accepted) { + private FilteredOrdinals(SortedSetDocValues inner, LongBitSet accepted) { this.inner = inner; this.accepted = accepted; } @@ -441,33 +446,33 @@ public class GlobalOrdinalsStringTermsAggregator extends AbstractStringTermsAggr } @Override - public long ordAt(int index) { - return ords[index]; + public BytesRef lookupOrd(long ord) throws IOException { + return inner.lookupOrd(ord); } @Override - public void doSetDocument(int docId) { - inner.setDocument(docId); - final int innerCardinality = inner.cardinality(); - ords = ArrayUtil.grow(ords, innerCardinality); - - cardinality = 0; - for (int slot = 0; slot < innerCardinality; slot++) { - long ord = inner.ordAt(slot); + public long nextOrd() throws IOException { + for (long ord = inner.nextOrd(); ord != NO_MORE_ORDS; ord = inner.nextOrd()) { if (accepted.get(ord)) { - ords[cardinality++] = ord; + return ord; } } + return NO_MORE_ORDS; } @Override - public int cardinality() { - return cardinality; - } - - 
@Override - public BytesRef lookupOrd(long ord) { - return inner.lookupOrd(ord); + public boolean advanceExact(int target) throws IOException { + if (inner.advanceExact(target)) { + for (long ord = inner.nextOrd(); ord != NO_MORE_ORDS; ord = inner.nextOrd()) { + if (accepted.get(ord)) { + // reset the iterator + boolean advanced = inner.advanceExact(target); + assert advanced; + return true; + } + } + } + return false; } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java index e041ada417f..8f8e2f3079b 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/LongTermsAggregator.java @@ -77,24 +77,25 @@ public class LongTermsAggregator extends TermsAggregator { @Override public void collect(int doc, long owningBucketOrdinal) throws IOException { assert owningBucketOrdinal == 0; - values.setDocument(doc); - final int valuesCount = values.count(); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); - long previous = Long.MAX_VALUE; - for (int i = 0; i < valuesCount; ++i) { - final long val = values.valueAt(i); - if (previous != val || i == 0) { - if ((longFilter == null) || (longFilter.accept(val))) { - long bucketOrdinal = bucketOrds.add(val); - if (bucketOrdinal < 0) { // already seen - bucketOrdinal = - 1 - bucketOrdinal; - collectExistingBucket(sub, doc, bucketOrdinal); - } else { - collectBucket(sub, doc, bucketOrdinal); + long previous = Long.MAX_VALUE; + for (int i = 0; i < valuesCount; ++i) { + final long val = values.nextValue(); + if (previous != val || i == 0) { + if ((longFilter == null) || (longFilter.accept(val))) { + long bucketOrdinal = bucketOrds.add(val); + if (bucketOrdinal < 0) { // already seen + bucketOrdinal = -1 - bucketOrdinal; + 
collectExistingBucket(sub, doc, bucketOrdinal); + } else { + collectBucket(sub, doc, bucketOrdinal); + } } - } - previous = val; + previous = val; + } } } } @@ -110,12 +111,13 @@ public class LongTermsAggregator extends TermsAggregator { for (LeafReaderContext ctx : context.searcher().getTopReaderContext().leaves()) { final SortedNumericDocValues values = getValues(valuesSource, ctx); for (int docId = 0; docId < ctx.reader().maxDoc(); ++docId) { - values.setDocument(docId); - final int valueCount = values.count(); - for (int i = 0; i < valueCount; ++i) { - long value = values.valueAt(i); - if (longFilter == null || longFilter.accept(value)) { - bucketOrds.add(value); + if (values.advanceExact(docId)) { + final int valueCount = values.docValueCount(); + for (int i = 0; i < valueCount; ++i) { + long value = values.nextValue(); + if (longFilter == null || longFilter.accept(value)) { + bucketOrds.add(value); + } } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java index c93fc94ff61..61c46cdfd68 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/StringTermsAggregator.java @@ -78,27 +78,29 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0; - values.setDocument(doc); - final int valuesCount = values.count(); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); - // SortedBinaryDocValues don't guarantee uniqueness so we need to take care of dups - previous.clear(); - for (int i = 0; i < valuesCount; ++i) { - final BytesRef bytes = values.valueAt(i); - if (includeExclude != null && !includeExclude.accept(bytes)) { - continue; + // 
SortedBinaryDocValues don't guarantee uniqueness so we + // need to take care of dups + previous.clear(); + for (int i = 0; i < valuesCount; ++i) { + final BytesRef bytes = values.nextValue(); + if (includeExclude != null && !includeExclude.accept(bytes)) { + continue; + } + if (previous.get().equals(bytes)) { + continue; + } + long bucketOrdinal = bucketOrds.add(bytes); + if (bucketOrdinal < 0) { // already seen + bucketOrdinal = -1 - bucketOrdinal; + collectExistingBucket(sub, doc, bucketOrdinal); + } else { + collectBucket(sub, doc, bucketOrdinal); + } + previous.copyBytes(bytes); } - if (previous.get().equals(bytes)) { - continue; - } - long bucketOrdinal = bucketOrds.add(bytes); - if (bucketOrdinal < 0) { // already seen - bucketOrdinal = - 1 - bucketOrdinal; - collectExistingBucket(sub, doc, bucketOrdinal); - } else { - collectBucket(sub, doc, bucketOrdinal); - } - previous.copyBytes(bytes); } } }; @@ -114,12 +116,13 @@ public class StringTermsAggregator extends AbstractStringTermsAggregator { final SortedBinaryDocValues values = valuesSource.bytesValues(ctx); // brute force for (int docId = 0; docId < ctx.reader().maxDoc(); ++docId) { - values.setDocument(docId); - final int valueCount = values.count(); - for (int i = 0; i < valueCount; ++i) { - final BytesRef term = values.valueAt(i); - if (includeExclude == null || includeExclude.accept(term)) { - bucketOrds.add(term); + if (values.advanceExact(docId)) { + final int valueCount = values.docValueCount(); + for (int i = 0; i < valueCount; ++i) { + final BytesRef term = values.nextValue(); + if (includeExclude == null || includeExclude.accept(term)) { + bucketOrds.add(term); + } } } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java index 15207102108..aabe5f585da 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/support/IncludeExclude.java @@ -21,7 +21,7 @@ package org.elasticsearch.search.aggregations.bucket.terms.support; import com.carrotsearch.hppc.BitMixer; import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; -import org.apache.lucene.index.RandomAccessOrds; + import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; @@ -233,14 +233,16 @@ public class IncludeExclude implements Writeable, ToXContent { } public abstract static class OrdinalsFilter { - public abstract LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals) throws IOException; + public abstract LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) + throws IOException; } class PartitionedOrdinalsFilter extends OrdinalsFilter { @Override - public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals) throws IOException { + public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) + throws IOException { final long numOrds = globalOrdinals.getValueCount(); final LongBitSet acceptedGlobalOrdinals = new LongBitSet(numOrds); final TermsEnum termEnum = globalOrdinals.termsEnum(); @@ -269,7 +271,7 @@ public class IncludeExclude implements Writeable, ToXContent { * */ @Override - public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals) + public LongBitSet acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) throws IOException { LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount()); TermsEnum globalTermsEnum; @@ -295,7 +297,8 @@ public class IncludeExclude implements Writeable, ToXContent { } @Override - public LongBitSet acceptedGlobalOrdinals(RandomAccessOrds globalOrdinals) throws IOException { + public LongBitSet 
acceptedGlobalOrdinals(SortedSetDocValues globalOrdinals) + throws IOException { LongBitSet acceptedGlobalOrdinals = new LongBitSet(globalOrdinals.getValueCount()); if (includeValues != null) { for (BytesRef term : includeValues) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java index eb75bae268f..0decfa05575 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/avg/AvgAggregator.java @@ -77,14 +77,15 @@ public class AvgAggregator extends NumericMetricsAggregator.SingleValue { counts = bigArrays.grow(counts, bucket + 1); sums = bigArrays.grow(sums, bucket + 1); - values.setDocument(doc); - final int valueCount = values.count(); - counts.increment(bucket, valueCount); - double sum = 0; - for (int i = 0; i < valueCount; i++) { - sum += values.valueAt(i); + if (values.advanceExact(doc)) { + final int valueCount = values.docValueCount(); + counts.increment(bucket, valueCount); + double sum = 0; + for (int i = 0; i < valueCount; i++) { + sum += values.nextValue(); + } + sums.increment(bucket, sum); } - sums.increment(bucket, sum); } }; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java index 7d5db460ae6..7a8483b1b26 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/cardinality/CardinalityAggregator.java @@ -22,8 +22,8 @@ package org.elasticsearch.search.aggregations.metrics.cardinality; import com.carrotsearch.hppc.BitMixer; import org.apache.lucene.index.LeafReaderContext; -import 
org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; @@ -88,7 +88,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue if (valuesSource instanceof ValuesSource.Bytes.WithOrdinals) { ValuesSource.Bytes.WithOrdinals source = (ValuesSource.Bytes.WithOrdinals) valuesSource; - final RandomAccessOrds ordinalValues = source.ordinalsValues(ctx); + final SortedSetDocValues ordinalValues = source.ordinalsValues(ctx); final long maxOrd = ordinalValues.getValueCount(); if (maxOrd == 0) { return new EmptyCollector(); @@ -114,7 +114,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue return collector; } - private void postCollectLastCollector() { + private void postCollectLastCollector() throws IOException { if (collector != null) { try { collector.postCollect(); @@ -126,7 +126,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue } @Override - protected void doPostCollection() { + protected void doPostCollection() throws IOException { postCollectLastCollector(); } @@ -159,7 +159,7 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue private abstract static class Collector extends LeafBucketCollector implements Releasable { - public abstract void postCollect(); + public abstract void postCollect() throws IOException; } @@ -192,11 +192,12 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue } @Override - public void collect(int doc, long bucketOrd) { - hashes.setDocument(doc); - final int valueCount = hashes.count(); - for (int i = 0; i < valueCount; ++i) { - counts.collect(bucketOrd, hashes.valueAt(i)); + public void collect(int doc, long bucketOrd) throws IOException { + if (hashes.advanceExact(doc)) 
{ + final int valueCount = hashes.count(); + for (int i = 0; i < valueCount; ++i) { + counts.collect(bucketOrd, hashes.nextValue()); + } } } @@ -224,12 +225,13 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue } private final BigArrays bigArrays; - private final RandomAccessOrds values; + private final SortedSetDocValues values; private final int maxOrd; private final HyperLogLogPlusPlus counts; private ObjectArray visitedOrds; - OrdinalsCollector(HyperLogLogPlusPlus counts, RandomAccessOrds values, BigArrays bigArrays) { + OrdinalsCollector(HyperLogLogPlusPlus counts, SortedSetDocValues values, + BigArrays bigArrays) { if (values.getValueCount() > Integer.MAX_VALUE) { throw new IllegalArgumentException(); } @@ -241,22 +243,22 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue } @Override - public void collect(int doc, long bucketOrd) { + public void collect(int doc, long bucketOrd) throws IOException { visitedOrds = bigArrays.grow(visitedOrds, bucketOrd + 1); FixedBitSet bits = visitedOrds.get(bucketOrd); if (bits == null) { bits = new FixedBitSet(maxOrd); visitedOrds.set(bucketOrd, bits); } - values.setDocument(doc); - final int valueCount = values.cardinality(); - for (int i = 0; i < valueCount; ++i) { - bits.set((int) values.ordAt(i)); + if (values.advanceExact(doc)) { + for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + bits.set((int) ord); + } } } @Override - public void postCollect() { + public void postCollect() throws IOException { final FixedBitSet allVisitedOrds = new FixedBitSet(maxOrd); for (long bucket = visitedOrds.size() - 1; bucket >= 0; --bucket) { final FixedBitSet bits = visitedOrds.get(bucket); @@ -296,11 +298,11 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue */ abstract static class MurmurHash3Values { - public abstract void setDocument(int docId); + public abstract boolean advanceExact(int 
docId) throws IOException; public abstract int count(); - public abstract long valueAt(int index); + public abstract long nextValue() throws IOException; /** * Return a {@link MurmurHash3Values} instance that computes hashes on the fly for each double value. @@ -332,18 +334,18 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue } @Override - public void setDocument(int docId) { - values.setDocument(docId); + public boolean advanceExact(int docId) throws IOException { + return values.advanceExact(docId); } @Override public int count() { - return values.count(); + return values.docValueCount(); } @Override - public long valueAt(int index) { - return BitMixer.mix64(values.valueAt(index)); + public long nextValue() throws IOException { + return BitMixer.mix64(values.nextValue()); } } @@ -356,18 +358,18 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue } @Override - public void setDocument(int docId) { - values.setDocument(docId); + public boolean advanceExact(int docId) throws IOException { + return values.advanceExact(docId); } @Override public int count() { - return values.count(); + return values.docValueCount(); } @Override - public long valueAt(int index) { - return BitMixer.mix64(java.lang.Double.doubleToLongBits(values.valueAt(index))); + public long nextValue() throws IOException { + return BitMixer.mix64(java.lang.Double.doubleToLongBits(values.nextValue())); } } @@ -382,18 +384,18 @@ public class CardinalityAggregator extends NumericMetricsAggregator.SingleValue } @Override - public void setDocument(int docId) { - values.setDocument(docId); + public boolean advanceExact(int docId) throws IOException { + return values.advanceExact(docId); } @Override public int count() { - return values.count(); + return values.docValueCount(); } @Override - public long valueAt(int index) { - final BytesRef bytes = values.valueAt(index); + public long nextValue() throws IOException { + final BytesRef bytes = 
values.nextValue(); org.elasticsearch.common.hash.MurmurHash3.hash128(bytes.bytes, bytes.offset, bytes.length, 0, hash); return hash.h1; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregator.java index 2083ea570d3..5c0cb4ba60a 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geobounds/GeoBoundsAggregator.java @@ -102,41 +102,42 @@ public final class GeoBoundsAggregator extends MetricsAggregator { negRights.fill(from, negRights.size(), Double.NEGATIVE_INFINITY); } - values.setDocument(doc); - final int valuesCount = values.count(); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); - for (int i = 0; i < valuesCount; ++i) { - GeoPoint value = values.valueAt(i); - double top = tops.get(bucket); - if (value.lat() > top) { - top = value.lat(); + for (int i = 0; i < valuesCount; ++i) { + GeoPoint value = values.nextValue(); + double top = tops.get(bucket); + if (value.lat() > top) { + top = value.lat(); + } + double bottom = bottoms.get(bucket); + if (value.lat() < bottom) { + bottom = value.lat(); + } + double posLeft = posLefts.get(bucket); + if (value.lon() >= 0 && value.lon() < posLeft) { + posLeft = value.lon(); + } + double posRight = posRights.get(bucket); + if (value.lon() >= 0 && value.lon() > posRight) { + posRight = value.lon(); + } + double negLeft = negLefts.get(bucket); + if (value.lon() < 0 && value.lon() < negLeft) { + negLeft = value.lon(); + } + double negRight = negRights.get(bucket); + if (value.lon() < 0 && value.lon() > negRight) { + negRight = value.lon(); + } + tops.set(bucket, top); + bottoms.set(bucket, bottom); + posLefts.set(bucket, posLeft); + posRights.set(bucket, posRight); + negLefts.set(bucket, negLeft); + 
negRights.set(bucket, negRight); } - double bottom = bottoms.get(bucket); - if (value.lat() < bottom) { - bottom = value.lat(); - } - double posLeft = posLefts.get(bucket); - if (value.lon() >= 0 && value.lon() < posLeft) { - posLeft = value.lon(); - } - double posRight = posRights.get(bucket); - if (value.lon() >= 0 && value.lon() > posRight) { - posRight = value.lon(); - } - double negLeft = negLefts.get(bucket); - if (value.lon() < 0 && value.lon() < negLeft) { - negLeft = value.lon(); - } - double negRight = negRights.get(bucket); - if (value.lon() < 0 && value.lon() > negRight) { - negRight = value.lon(); - } - tops.set(bucket, top); - bottoms.set(bucket, bottom); - posLefts.set(bucket, posLeft); - posRights.set(bucket, posRight); - negLefts.set(bucket, negLeft); - negRights.set(bucket, negRight); } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java index 4b23d042d53..795524e5a0f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/GeoCentroidAggregator.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.util.BigArrays; @@ -72,29 +71,30 @@ final class GeoCentroidAggregator extends MetricsAggregator { centroids = bigArrays.grow(centroids, bucket + 1); counts = bigArrays.grow(counts, bucket + 1); - values.setDocument(doc); - final int valueCount = values.count(); - if (valueCount > 0) { + if (values.advanceExact(doc)) { + final int valueCount = 
values.docValueCount(); double[] pt = new double[2]; // get the previously accumulated number of counts long prevCounts = counts.get(bucket); // increment by the number of points for this document counts.increment(bucket, valueCount); - // get the previous GeoPoint if a moving avg was computed + // get the previous GeoPoint if a moving avg was + // computed if (prevCounts > 0) { final long mortonCode = centroids.get(bucket); - pt[0] = GeoPointField.decodeLongitude(mortonCode); - pt[1] = GeoPointField.decodeLatitude(mortonCode); + pt[0] = InternalGeoCentroid.decodeLongitude(mortonCode); + pt[1] = InternalGeoCentroid.decodeLatitude(mortonCode); } // update the moving average for (int i = 0; i < valueCount; ++i) { - GeoPoint value = values.valueAt(i); + GeoPoint value = values.nextValue(); pt[0] = pt[0] + (value.getLon() - pt[0]) / ++prevCounts; pt[1] = pt[1] + (value.getLat() - pt[1]) / prevCounts; } - // TODO: we do not need to interleave the lat and lon bits here + // TODO: we do not need to interleave the lat and lon + // bits here // should we just store them contiguously? - centroids.set(bucket, GeoPointField.encodeLatLon(pt[1], pt[0])); + centroids.set(bucket, InternalGeoCentroid.encodeLatLon(pt[1], pt[0])); } } }; @@ -108,7 +108,8 @@ final class GeoCentroidAggregator extends MetricsAggregator { final long bucketCount = counts.get(bucket); final long mortonCode = centroids.get(bucket); final GeoPoint bucketCentroid = (bucketCount > 0) - ? new GeoPoint(GeoPointField.decodeLatitude(mortonCode), GeoPointField.decodeLongitude(mortonCode)) + ? 
new GeoPoint(InternalGeoCentroid.decodeLatitude(mortonCode), + InternalGeoCentroid.decodeLongitude(mortonCode)) : null; return new InternalGeoCentroid(name, bucketCentroid , bucketCount, pipelineAggregators(), metaData()); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java index 597ad6176bd..bd65cd28aff 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/geocentroid/InternalGeoCentroid.java @@ -19,7 +19,7 @@ package org.elasticsearch.search.aggregations.metrics.geocentroid; -import org.apache.lucene.spatial.geopoint.document.GeoPointField; +import org.apache.lucene.geo.GeoEncodingUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -38,6 +38,18 @@ public class InternalGeoCentroid extends InternalAggregation implements GeoCentr protected final GeoPoint centroid; protected final long count; + public static long encodeLatLon(double lat, double lon) { + return (Integer.toUnsignedLong(GeoEncodingUtils.encodeLatitude(lat)) << 32) | Integer.toUnsignedLong(GeoEncodingUtils.encodeLongitude(lon)); + } + + public static double decodeLatitude(long encodedLatLon) { + return GeoEncodingUtils.decodeLatitude((int) (encodedLatLon >>> 32)); + } + + public static double decodeLongitude(long encodedLatLon) { + return GeoEncodingUtils.decodeLongitude((int) (encodedLatLon & 0xFFFFFFFFL)); + } + InternalGeoCentroid(String name, GeoPoint centroid, long count, List pipelineAggregators, Map metaData) { super(name, pipelineAggregators, metaData); @@ -55,7 +67,7 @@ public class InternalGeoCentroid extends InternalAggregation implements GeoCentr count = in.readVLong(); if 
(in.readBoolean()) { final long hash = in.readLong(); - centroid = new GeoPoint(GeoPointField.decodeLatitude(hash), GeoPointField.decodeLongitude(hash)); + centroid = new GeoPoint(decodeLatitude(hash), decodeLongitude(hash)); } else { centroid = null; } @@ -67,7 +79,7 @@ public class InternalGeoCentroid extends InternalAggregation implements GeoCentr if (centroid != null) { out.writeBoolean(true); // should we just write lat and lon separately? - out.writeLong(GeoPointField.encodeLatLon(centroid.lat(), centroid.lon())); + out.writeLong(encodeLatLon(centroid.lat(), centroid.lon())); } else { out.writeBoolean(false); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java index cef7f373280..8ef4d0b7e29 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/max/MaxAggregator.java @@ -82,10 +82,12 @@ public class MaxAggregator extends NumericMetricsAggregator.SingleValue { maxes = bigArrays.grow(maxes, bucket + 1); maxes.fill(from, maxes.size(), Double.NEGATIVE_INFINITY); } - final double value = values.get(doc); - double max = maxes.get(bucket); - max = Math.max(max, value); - maxes.set(bucket, max); + if (values.advanceExact(doc)) { + final double value = values.doubleValue(); + double max = maxes.get(bucket); + max = Math.max(max, value); + maxes.set(bucket, max); + } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java index 31e87b12a62..f355f55139c 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/min/MinAggregator.java @@ -81,10 +81,12 @@ public class 
MinAggregator extends NumericMetricsAggregator.SingleValue { mins = bigArrays.grow(mins, bucket + 1); mins.fill(from, mins.size(), Double.POSITIVE_INFINITY); } - final double value = values.get(doc); - double min = mins.get(bucket); - min = Math.min(min, value); - mins.set(bucket, min); + if (values.advanceExact(doc)) { + final double value = values.doubleValue(); + double min = mins.get(bucket); + min = Math.min(min, value); + mins.set(bucket, min); + } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java index bf4443c8873..47c267aae90 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/AbstractHDRPercentilesAggregator.java @@ -95,10 +95,11 @@ public abstract class AbstractHDRPercentilesAggregator extends NumericMetricsAgg states.set(bucket, state); } - values.setDocument(doc); - final int valueCount = values.count(); - for (int i = 0; i < valueCount; i++) { - state.recordValue(values.valueAt(i)); + if (values.advanceExact(doc)) { + final int valueCount = values.docValueCount(); + for (int i = 0; i < valueCount; i++) { + state.recordValue(values.nextValue()); + } } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java index 2c68d580e14..1b5ed510f8d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/AbstractTDigestPercentilesAggregator.java @@ -87,10 +87,11 @@ public abstract class AbstractTDigestPercentilesAggregator extends NumericMetric states.set(bucket, state); } - values.setDocument(doc); - final int valueCount = values.count(); - for (int i = 0; i < valueCount; i++) { - state.add(values.valueAt(i)); + if (values.advanceExact(doc)) { + final int valueCount = values.docValueCount(); + for (int i = 0; i < valueCount; i++) { + state.add(values.nextValue()); + } } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java index e3657c3851d..cca176bd1ad 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/StatsAggregator.java @@ -94,21 +94,22 @@ public class StatsAggregator extends NumericMetricsAggregator.MultiValue { maxes.fill(from, overSize, Double.NEGATIVE_INFINITY); } - values.setDocument(doc); - final int valuesCount = values.count(); - counts.increment(bucket, valuesCount); - double sum = 0; - double min = mins.get(bucket); - double max = maxes.get(bucket); - for (int i = 0; i < valuesCount; i++) { - double value = values.valueAt(i); - sum += value; - min = Math.min(min, value); - max = Math.max(max, value); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); + counts.increment(bucket, valuesCount); + double sum = 0; + double min = mins.get(bucket); + double max = maxes.get(bucket); + for (int i = 0; i < valuesCount; i++) { + double value = values.nextValue(); + sum += value; + min = Math.min(min, value); + max = Math.max(max, value); + } + sums.increment(bucket, sum); + mins.set(bucket, min); + maxes.set(bucket, max); } - sums.increment(bucket, sum); - 
mins.set(bucket, min); - maxes.set(bucket, max); } }; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java index d6faf5cbb78..8dd78bf1373 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/stats/extended/ExtendedStatsAggregator.java @@ -102,24 +102,25 @@ public class ExtendedStatsAggregator extends NumericMetricsAggregator.MultiValue maxes.fill(from, overSize, Double.NEGATIVE_INFINITY); } - values.setDocument(doc); - final int valuesCount = values.count(); - counts.increment(bucket, valuesCount); - double sum = 0; - double sumOfSqr = 0; - double min = mins.get(bucket); - double max = maxes.get(bucket); - for (int i = 0; i < valuesCount; i++) { - double value = values.valueAt(i); - sum += value; - sumOfSqr += value * value; - min = Math.min(min, value); - max = Math.max(max, value); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); + counts.increment(bucket, valuesCount); + double sum = 0; + double sumOfSqr = 0; + double min = mins.get(bucket); + double max = maxes.get(bucket); + for (int i = 0; i < valuesCount; i++) { + double value = values.nextValue(); + sum += value; + sumOfSqr += value * value; + min = Math.min(min, value); + max = Math.max(max, value); + } + sums.increment(bucket, sum); + sumOfSqrs.increment(bucket, sumOfSqr); + mins.set(bucket, min); + maxes.set(bucket, max); } - sums.increment(bucket, sum); - sumOfSqrs.increment(bucket, sumOfSqr); - mins.set(bucket, min); - maxes.set(bucket, max); } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java index 
f7f2ee5501b..bd325b39373 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/sum/SumAggregator.java @@ -71,13 +71,14 @@ public class SumAggregator extends NumericMetricsAggregator.SingleValue { @Override public void collect(int doc, long bucket) throws IOException { sums = bigArrays.grow(sums, bucket + 1); - values.setDocument(doc); - final int valuesCount = values.count(); - double sum = 0; - for (int i = 0; i < valuesCount; i++) { - sum += values.valueAt(i); + if (values.advanceExact(doc)) { + final int valuesCount = values.docValueCount(); + double sum = 0; + for (int i = 0; i < valuesCount; i++) { + sum += values.nextValue(); + } + sums.increment(bucket, sum); } - sums.increment(bucket, sum); } }; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java index e0ed6d1ade0..929e26d04ba 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/metrics/valuecount/ValueCountAggregator.java @@ -73,8 +73,9 @@ public class ValueCountAggregator extends NumericMetricsAggregator.SingleValue { @Override public void collect(int doc, long bucket) throws IOException { counts = bigArrays.grow(counts, bucket + 1); - values.setDocument(doc); - counts.increment(bucket, values.count()); + if (values.advanceExact(doc)) { + counts.increment(bucket, values.docValueCount()); + } } }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java index 4a01b67b780..d5ff2cde97f 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java 
+++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/MissingValues.java @@ -20,11 +20,12 @@ package org.elasticsearch.search.aggregations.support; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds; +import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; @@ -56,26 +57,30 @@ public enum MissingValues { private int count; @Override - public BytesRef valueAt(int index) { - if (count > 0) { - return values.valueAt(index); - } else if (index == 0) { - return missing; + public boolean advanceExact(int doc) throws IOException { + if (values.advanceExact(doc)) { + count = values.docValueCount(); } else { - throw new IndexOutOfBoundsException(); + count = 0; } + // always return true because we want to return a value even if + // the document does not have a value + return true; } @Override - public void setDocument(int docId) { - values.setDocument(docId); - count = values.count(); - } - - @Override - public int count() { + public int docValueCount() { return count == 0 ? 
1 : count; } + + @Override + public BytesRef nextValue() throws IOException { + if (count > 0) { + return values.nextValue(); + } else { + return missing; + } + } }; } @@ -109,32 +114,36 @@ public enum MissingValues { } static SortedNumericDocValues replaceMissing(final SortedNumericDocValues values, final long missing) { - return new SortedNumericDocValues() { + return new AbstractSortedNumericDocValues() { private int count; @Override - public void setDocument(int doc) { - values.setDocument(doc); - count = values.count(); - } - - @Override - public long valueAt(int index) { + public long nextValue() throws IOException { if (count > 0) { - return values.valueAt(index); - } else if (index == 0) { - return missing; + return values.nextValue(); } else { - throw new IndexOutOfBoundsException(); + return missing; } } @Override - public int count() { + public int docValueCount() { return count == 0 ? 1 : count; } + @Override + public boolean advanceExact(int doc) throws IOException { + if (values.advanceExact(doc)) { + count = values.docValueCount(); + } else { + count = 0; + } + // always return true because we want to return a value even if + // the document does not have a value + return true; + } + }; } @@ -144,24 +153,28 @@ public enum MissingValues { private int count; @Override - public void setDocument(int doc) { - values.setDocument(doc); - count = values.count(); + public boolean advanceExact(int doc) throws IOException { + if (values.advanceExact(doc)) { + count = values.docValueCount(); + } else { + count = 0; + } + // always return true because we want to return a value even if + // the document does not have a value + return true; } @Override - public double valueAt(int index) { + public double nextValue() throws IOException { if (count > 0) { - return values.valueAt(index); - } else if (index == 0) { - return missing; + return values.nextValue(); } else { - throw new IndexOutOfBoundsException(); + return missing; } } @Override - public int count() { + 
public int docValueCount() { return count == 0 ? 1 : count; } @@ -177,20 +190,22 @@ public enum MissingValues { } @Override - public RandomAccessOrds ordinalsValues(LeafReaderContext context) { - RandomAccessOrds values = valuesSource.ordinalsValues(context); + public SortedSetDocValues ordinalsValues(LeafReaderContext context) throws IOException { + SortedSetDocValues values = valuesSource.ordinalsValues(context); return replaceMissing(values, missing); } @Override - public RandomAccessOrds globalOrdinalsValues(LeafReaderContext context) { - RandomAccessOrds values = valuesSource.globalOrdinalsValues(context); + public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) + throws IOException { + SortedSetDocValues values = valuesSource.globalOrdinalsValues(context); return replaceMissing(values, missing); } }; } - static RandomAccessOrds replaceMissing(final RandomAccessOrds values, final BytesRef missing) { + static SortedSetDocValues replaceMissing(final SortedSetDocValues values, + final BytesRef missing) throws IOException { final long missingOrd = values.lookupTerm(missing); if (missingOrd >= 0) { // The value already exists @@ -201,19 +216,15 @@ public enum MissingValues { } } - static RandomAccessOrds replaceMissingOrd(final RandomAccessOrds values, final long missingOrd) { - return new AbstractRandomAccessOrds() { + static SortedSetDocValues replaceMissingOrd(final SortedSetDocValues values, + final long missingOrd) { + return new AbstractSortedSetDocValues() { - private int cardinality = 0; + private boolean hasOrds; + private long nextMissingOrd; @Override - public void doSetDocument(int docID) { - values.setDocument(docID); - cardinality = values.cardinality(); - } - - @Override - public BytesRef lookupOrd(long ord) { + public BytesRef lookupOrd(long ord) throws IOException { return values.lookupOrd(ord); } @@ -223,36 +234,39 @@ public enum MissingValues { } @Override - public long ordAt(int index) { - if (cardinality > 0) { - return 
values.ordAt(index); - } else if (index == 0) { - return missingOrd; + public long nextOrd() throws IOException { + if (hasOrds) { + return values.nextOrd(); } else { - throw new IndexOutOfBoundsException(); + // we want to return the next missing ord but set this to + // NO_MORE_ORDS so on the next call we indicate there are no + // more values + long ordToReturn = nextMissingOrd; + nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; + return ordToReturn; } } @Override - public int cardinality() { - return cardinality == 0 ? 1 : cardinality; + public boolean advanceExact(int doc) throws IOException { + hasOrds = values.advanceExact(doc); + nextMissingOrd = missingOrd; + // always return true because we want to return a value even if + // the document does not have a value + return true; } }; } - static RandomAccessOrds insertOrd(final RandomAccessOrds values, final long insertedOrd, final BytesRef missingValue) { - return new AbstractRandomAccessOrds() { + static SortedSetDocValues insertOrd(final SortedSetDocValues values, final long insertedOrd, + final BytesRef missingValue) { + return new AbstractSortedSetDocValues() { - private int cardinality = 0; + private boolean hasOrds; + private long nextMissingOrd; @Override - public void doSetDocument(int docID) { - values.setDocument(docID); - cardinality = values.cardinality(); - } - - @Override - public BytesRef lookupOrd(long ord) { + public BytesRef lookupOrd(long ord) throws IOException { if (ord < insertedOrd) { return values.lookupOrd(ord); } else if (ord > insertedOrd) { @@ -268,24 +282,31 @@ public enum MissingValues { } @Override - public long ordAt(int index) { - if (cardinality > 0) { - final long ord = values.ordAt(index); + public long nextOrd() throws IOException { + if (hasOrds) { + final long ord = values.nextOrd(); if (ord < insertedOrd) { return ord; } else { return ord + 1; } - } else if (index == 0) { - return insertedOrd; } else { - throw new IndexOutOfBoundsException(); + // we want to return the 
next missing ord but set this to + // NO_MORE_ORDS so on the next call we indicate there are no + // more values + long ordToReturn = nextMissingOrd; + nextMissingOrd = SortedSetDocValues.NO_MORE_ORDS; + return ordToReturn; } } @Override - public int cardinality() { - return cardinality == 0 ? 1 : cardinality; + public boolean advanceExact(int doc) throws IOException { + hasOrds = values.advanceExact(doc); + nextMissingOrd = insertedOrd; + // always return true because we want to return a value even if + // the document does not have a value + return true; } }; } @@ -312,26 +333,30 @@ public enum MissingValues { private int count; @Override - public GeoPoint valueAt(int index) { - if (count > 0) { - return values.valueAt(index); - } else if (index == 0) { - return missing; + public boolean advanceExact(int doc) throws IOException { + if (values.advanceExact(doc)) { + count = values.docValueCount(); } else { - throw new IndexOutOfBoundsException(); + count = 0; } + // always return true because we want to return a value even if + // the document does not have a value + return true; } @Override - public void setDocument(int docId) { - values.setDocument(docId); - count = values.count(); - } - - @Override - public int count() { + public int docValueCount() { return count == 0 ? 
1 : count; } + + @Override + public GeoPoint nextValue() throws IOException { + if (count > 0) { + return values.nextValue(); + } else { + return missing; + } + } }; } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java index f3de52ae653..e6ed81c2d83 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/ValuesSource.java @@ -22,14 +22,15 @@ import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.lucene.ScorerAware; +import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData; import org.elasticsearch.index.fielddata.AtomicParentChildFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -41,7 +42,6 @@ import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.index.fielddata.SortingBinaryDocValues; -import org.elasticsearch.index.fielddata.SortingNumericDocValues; import org.elasticsearch.index.fielddata.SortingNumericDoubleValues; import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData; import org.elasticsearch.script.LeafSearchScript; @@ 
-72,11 +72,8 @@ public abstract class ValuesSource { @Override public Bits docsWithValue(LeafReaderContext context) throws IOException { final SortedBinaryDocValues bytes = bytesValues(context); - if (org.elasticsearch.index.fielddata.FieldData.unwrapSingleton(bytes) != null) { - return org.elasticsearch.index.fielddata.FieldData.unwrapSingletonBits(bytes); - } else { - return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes, context.reader().maxDoc()); - } + return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes, + context.reader().maxDoc()); } public abstract static class WithOrdinals extends Bytes { @@ -84,43 +81,42 @@ public abstract class ValuesSource { public static final WithOrdinals EMPTY = new WithOrdinals() { @Override - public RandomAccessOrds ordinalsValues(LeafReaderContext context) { + public SortedSetDocValues ordinalsValues(LeafReaderContext context) { return DocValues.emptySortedSet(); } @Override - public RandomAccessOrds globalOrdinalsValues(LeafReaderContext context) { + public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) { return DocValues.emptySortedSet(); } @Override public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(context.reader().maxDoc()); + return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(); } }; @Override - public Bits docsWithValue(LeafReaderContext context) { - final RandomAccessOrds ordinals = ordinalsValues(context); - if (DocValues.unwrapSingleton(ordinals) != null) { - return DocValues.docsWithValue(DocValues.unwrapSingleton(ordinals), context.reader().maxDoc()); - } else { - return DocValues.docsWithValue(ordinals, context.reader().maxDoc()); - } + public Bits docsWithValue(LeafReaderContext context) throws IOException { + final SortedSetDocValues ordinals = ordinalsValues(context); + return 
org.elasticsearch.index.fielddata.FieldData.docsWithValue(ordinals, + context.reader().maxDoc()); } - public abstract RandomAccessOrds ordinalsValues(LeafReaderContext context); + public abstract SortedSetDocValues ordinalsValues(LeafReaderContext context) + throws IOException; - public abstract RandomAccessOrds globalOrdinalsValues(LeafReaderContext context); + public abstract SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) + throws IOException; - public long globalMaxOrd(IndexSearcher indexSearcher) { + public long globalMaxOrd(IndexSearcher indexSearcher) throws IOException { IndexReader indexReader = indexSearcher.getIndexReader(); if (indexReader.leaves().isEmpty()) { return 0; } else { LeafReaderContext atomicReaderContext = indexReader.leaves().get(0); - RandomAccessOrds values = globalOrdinalsValues(atomicReaderContext); + SortedSetDocValues values = globalOrdinalsValues(atomicReaderContext); return values.getValueCount(); } } @@ -140,13 +136,13 @@ public abstract class ValuesSource { } @Override - public RandomAccessOrds ordinalsValues(LeafReaderContext context) { + public SortedSetDocValues ordinalsValues(LeafReaderContext context) { final AtomicOrdinalsFieldData atomicFieldData = indexFieldData.load(context); return atomicFieldData.getOrdinalsValues(); } @Override - public RandomAccessOrds globalOrdinalsValues(LeafReaderContext context) { + public SortedSetDocValues globalOrdinalsValues(LeafReaderContext context) { final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader)context.parent.reader()); final AtomicOrdinalsFieldData atomicFieldData = global.load(context); return atomicFieldData.getOrdinalsValues(); @@ -240,12 +236,12 @@ public abstract class ValuesSource { @Override public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException { - return org.elasticsearch.index.fielddata.FieldData.emptySortedNumericDoubles(context.reader().maxDoc()); + return 
org.elasticsearch.index.fielddata.FieldData.emptySortedNumericDoubles(); } @Override public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(context.reader().maxDoc()); + return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(); } }; @@ -263,18 +259,12 @@ public abstract class ValuesSource { public Bits docsWithValue(LeafReaderContext context) throws IOException { if (isFloatingPoint()) { final SortedNumericDoubleValues values = doubleValues(context); - if (org.elasticsearch.index.fielddata.FieldData.unwrapSingleton(values) != null) { - return org.elasticsearch.index.fielddata.FieldData.unwrapSingletonBits(values); - } else { - return org.elasticsearch.index.fielddata.FieldData.docsWithValue(values, context.reader().maxDoc()); - } + return org.elasticsearch.index.fielddata.FieldData.docsWithValue(values, + context.reader().maxDoc()); } else { final SortedNumericDocValues values = longValues(context); - if (DocValues.unwrapSingleton(values) != null) { - return DocValues.unwrapSingletonBits(values); - } else { - return DocValues.docsWithValue(values, context.reader().maxDoc()); - } + return org.elasticsearch.index.fielddata.FieldData.docsWithValue(values, + context.reader().maxDoc()); } } @@ -313,7 +303,7 @@ public abstract class ValuesSource { return new DoubleValues(delegate.doubleValues(context), script.getLeafSearchScript(context)); } - static class LongValues extends SortingNumericDocValues implements ScorerAware { + static class LongValues extends AbstractSortingNumericDocValues implements ScorerAware { private final SortedNumericDocValues longValues; private final LeafSearchScript script; @@ -324,20 +314,23 @@ public abstract class ValuesSource { } @Override - public void setDocument(int doc) { - longValues.setDocument(doc); - resize(longValues.count()); - script.setDocument(doc); - for (int i = 0; i < count(); ++i) { - 
script.setNextAggregationValue(longValues.valueAt(i)); - values[i] = script.runAsLong(); - } - sort(); + public void setScorer(Scorer scorer) { + script.setScorer(scorer); } @Override - public void setScorer(Scorer scorer) { - script.setScorer(scorer); + public boolean advanceExact(int target) throws IOException { + if (longValues.advanceExact(target)) { + resize(longValues.docValueCount()); + script.setDocument(target); + for (int i = 0; i < docValueCount(); ++i) { + script.setNextAggregationValue(longValues.nextValue()); + values[i] = script.runAsLong(); + } + sort(); + return true; + } + return false; } } @@ -352,20 +345,23 @@ public abstract class ValuesSource { } @Override - public void setDocument(int doc) { - doubleValues.setDocument(doc); - resize(doubleValues.count()); - script.setDocument(doc); - for (int i = 0; i < count(); ++i) { - script.setNextAggregationValue(doubleValues.valueAt(i)); - values[i] = script.runAsDouble(); - } - sort(); + public void setScorer(Scorer scorer) { + script.setScorer(scorer); } @Override - public void setScorer(Scorer scorer) { - script.setScorer(scorer); + public boolean advanceExact(int target) throws IOException { + if (doubleValues.advanceExact(target)) { + resize(doubleValues.docValueCount()); + script.setDocument(target); + for (int i = 0; i < docValueCount(); ++i) { + script.setNextAggregationValue(doubleValues.nextValue()); + values[i] = script.runAsDouble(); + } + sort(); + return true; + } + return false; } } } @@ -468,21 +464,27 @@ public abstract class ValuesSource { } @Override - public void setDocument(int docId) { - bytesValues.setDocument(docId); - count = bytesValues.count(); - grow(); - for (int i = 0; i < count; ++i) { - final BytesRef value = bytesValues.valueAt(i); - script.setNextAggregationValue(value.utf8ToString()); - values[i].copyChars(script.run().toString()); - } - sort(); + public void setScorer(Scorer scorer) { + script.setScorer(scorer); } @Override - public void setScorer(Scorer scorer) { - 
script.setScorer(scorer); + public boolean advanceExact(int doc) throws IOException { + if (bytesValues.advanceExact(doc)) { + count = bytesValues.docValueCount(); + grow(); + for (int i = 0; i < count; ++i) { + final BytesRef value = bytesValues.nextValue(); + script.setNextAggregationValue(value.utf8ToString()); + values[i].copyChars(script.run().toString()); + } + sort(); + return true; + } else { + count = 0; + grow(); + return false; + } } } } @@ -493,12 +495,12 @@ public abstract class ValuesSource { @Override public MultiGeoPointValues geoPointValues(LeafReaderContext context) { - return org.elasticsearch.index.fielddata.FieldData.emptyMultiGeoPoints(context.reader().maxDoc()); + return org.elasticsearch.index.fielddata.FieldData.emptyMultiGeoPoints(); } @Override public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException { - return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(context.reader().maxDoc()); + return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(); } }; @@ -506,11 +508,8 @@ public abstract class ValuesSource { @Override public Bits docsWithValue(LeafReaderContext context) { final MultiGeoPointValues geoPoints = geoPointValues(context); - if (org.elasticsearch.index.fielddata.FieldData.unwrapSingleton(geoPoints) != null) { - return org.elasticsearch.index.fielddata.FieldData.unwrapSingletonBits(geoPoints); - } else { - return org.elasticsearch.index.fielddata.FieldData.docsWithValue(geoPoints, context.reader().maxDoc()); - } + return org.elasticsearch.index.fielddata.FieldData.docsWithValue(geoPoints, + context.reader().maxDoc()); } public abstract MultiGeoPointValues geoPointValues(LeafReaderContext context); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java index ce36fec9416..78685ed0e82 100644 --- 
a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptBytesValues.java @@ -24,6 +24,7 @@ import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortingBinaryDocValues; import org.elasticsearch.script.LeafSearchScript; +import java.io.IOException; import java.lang.reflect.Array; import java.util.Collection; @@ -48,14 +49,16 @@ public class ScriptBytesValues extends SortingBinaryDocValues implements ScorerA } @Override - public void setDocument(int docId) { - script.setDocument(docId); + public boolean advanceExact(int doc) throws IOException { + script.setDocument(doc); final Object value = script.run(); - if (value == null) { - count = 0; + return false; } else if (value.getClass().isArray()) { count = Array.getLength(value); + if (count == 0) { + return false; + } grow(); for (int i = 0; i < count; ++i) { set(i, Array.get(value, i)); @@ -63,6 +66,9 @@ public class ScriptBytesValues extends SortingBinaryDocValues implements ScorerA } else if (value instanceof Collection) { final Collection coll = (Collection) value; count = coll.size(); + if (count == 0) { + return false; + } grow(); int i = 0; for (Object v : coll) { @@ -73,6 +79,7 @@ public class ScriptBytesValues extends SortingBinaryDocValues implements ScorerA set(0, value); } sort(); + return true; } @Override diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java index 4f91f59ebe7..619ffde0a1e 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptDoubleValues.java @@ -25,6 +25,7 @@ import org.elasticsearch.script.LeafSearchScript; import 
org.elasticsearch.search.aggregations.AggregationExecutionException; import org.joda.time.ReadableInstant; +import java.io.IOException; import java.lang.reflect.Array; import java.util.Collection; @@ -41,12 +42,12 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc } @Override - public void setDocument(int docId) { - script.setDocument(docId); + public boolean advanceExact(int target) throws IOException { + script.setDocument(target); final Object value = script.run(); if (value == null) { - resize(0); + return false; } else if (value instanceof Number) { resize(1); values[0] = ((Number) value).doubleValue(); @@ -54,23 +55,32 @@ public class ScriptDoubleValues extends SortingNumericDoubleValues implements Sc resize(1); values[0] = ((ReadableInstant) value).getMillis(); } else if (value.getClass().isArray()) { - resize(Array.getLength(value)); - for (int i = 0; i < count(); ++i) { + int length = Array.getLength(value); + if (length == 0) { + return false; + } + resize(length); + for (int i = 0; i < length; ++i) { values[i] = toDoubleValue(Array.get(value, i)); } } else if (value instanceof Collection) { - resize(((Collection) value).size()); + Collection coll = (Collection) value; + if (coll.isEmpty()) { + return false; + } + resize(coll.size()); int i = 0; - for (Object v : (Collection) value) { + for (Object v : coll) { values[i++] = toDoubleValue(v); } - assert i == count(); + assert i == docValueCount(); } else { resize(1); values[0] = toDoubleValue(value); } sort(); + return true; } private static double toDoubleValue(Object o) { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java index cd515401c52..6247e96c7ec 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java +++ 
b/core/src/main/java/org/elasticsearch/search/aggregations/support/values/ScriptLongValues.java @@ -21,11 +21,12 @@ package org.elasticsearch.search.aggregations.support.values; import org.apache.lucene.search.Scorer; import org.apache.lucene.util.LongValues; import org.elasticsearch.common.lucene.ScorerAware; -import org.elasticsearch.index.fielddata.SortingNumericDocValues; +import org.elasticsearch.index.fielddata.AbstractSortingNumericDocValues; import org.elasticsearch.script.LeafSearchScript; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.joda.time.ReadableInstant; +import java.io.IOException; import java.lang.reflect.Array; import java.util.Collection; import java.util.Iterator; @@ -33,7 +34,7 @@ import java.util.Iterator; /** * {@link LongValues} implementation which is based on a script */ -public class ScriptLongValues extends SortingNumericDocValues implements ScorerAware { +public class ScriptLongValues extends AbstractSortingNumericDocValues implements ScorerAware { final LeafSearchScript script; @@ -43,28 +44,36 @@ public class ScriptLongValues extends SortingNumericDocValues implements ScorerA } @Override - public void setDocument(int docId) { - script.setDocument(docId); + public boolean advanceExact(int target) throws IOException { + script.setDocument(target); final Object value = script.run(); if (value == null) { - resize(0); + return false; } else if (value.getClass().isArray()) { - resize(Array.getLength(value)); - for (int i = 0; i < count(); ++i) { + int length = Array.getLength(value); + if (length == 0) { + return false; + } + resize(length); + for (int i = 0; i < length; ++i) { values[i] = toLongValue(Array.get(value, i)); } } else if (value instanceof Collection) { - resize(((Collection) value).size()); + Collection coll = (Collection) value; + if (coll.isEmpty()) { + return false; + } + resize(coll.size()); int i = 0; - for (Iterator it = ((Collection) value).iterator(); it.hasNext(); ++i) { + for 
(Iterator it = coll.iterator(); it.hasNext(); ++i) { values[i] = toLongValue(it.next()); } - assert i == count(); + assert i == docValueCount(); } else { @@ -73,6 +82,7 @@ public class ScriptLongValues extends SortingNumericDocValues implements ScorerA } sort(); + return true; } private static long toLongValue(Object o) { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 97f2681252b..e5bfcfa6df5 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -132,41 +132,41 @@ public class FetchPhase implements SearchPhase { } } - SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; - FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); - for (int index = 0; index < context.docIdsToLoadSize(); index++) { - if(context.isCancelled()) { - throw new TaskCancelledException("cancelled"); - } - int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index]; - int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves()); - LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); - int subDocId = docId - subReaderContext.docBase; + try { + SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()]; + FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); + for (int index = 0; index < context.docIdsToLoadSize(); index++) { + if(context.isCancelled()) { + throw new TaskCancelledException("cancelled"); + } + int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index]; + int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves()); + LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex); + int subDocId = docId - subReaderContext.docBase; - final SearchHit searchHit; - try { + final 
SearchHit searchHit; int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId); if (rootDocId != -1) { searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, fieldNames, fieldNamePatterns, subReaderContext); } else { searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, subReaderContext); } - } catch (IOException e) { - throw ExceptionsHelper.convertToElastic(e); + + hits[index] = searchHit; + hitContext.reset(searchHit, subReaderContext, subDocId, context.searcher()); + for (FetchSubPhase fetchSubPhase : fetchSubPhases) { + fetchSubPhase.hitExecute(context, hitContext); + } } - hits[index] = searchHit; - hitContext.reset(searchHit, subReaderContext, subDocId, context.searcher()); for (FetchSubPhase fetchSubPhase : fetchSubPhases) { - fetchSubPhase.hitExecute(context, hitContext); + fetchSubPhase.hitsExecute(context, hits); } - } - for (FetchSubPhase fetchSubPhase : fetchSubPhases) { - fetchSubPhase.hitsExecute(context, hits); + context.fetchResult().hits(new SearchHits(hits, context.queryResult().getTotalHits(), context.queryResult().getMaxScore())); + } catch (IOException e) { + throw ExceptionsHelper.convertToElastic(e); } - - context.fetchResult().hits(new SearchHits(hits, context.queryResult().getTotalHits(), context.queryResult().getMaxScore())); } private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java index 6f34eba2129..84154926bf6 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchSubPhase.java @@ -25,6 +25,7 @@ import org.apache.lucene.search.IndexSearcher; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.internal.SearchContext; +import java.io.IOException; import 
java.util.HashMap; import java.util.Map; @@ -79,8 +80,8 @@ public interface FetchSubPhase { /** * Executes the hit level phase, with a reader and doc id (note, its a low level reader, and the matching doc). */ - default void hitExecute(SearchContext context, HitContext hitContext) {} + default void hitExecute(SearchContext context, HitContext hitContext) throws IOException {} - default void hitsExecute(SearchContext context, SearchHit[] hits) {} + default void hitsExecute(SearchContext context, SearchHit[] hits) throws IOException {} } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java index 031f7a62400..42cee23d390 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/DocValueFieldsFetchSubPhase.java @@ -25,6 +25,7 @@ import org.elasticsearch.search.SearchHitField; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.internal.SearchContext; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -37,7 +38,7 @@ import java.util.HashMap; public final class DocValueFieldsFetchSubPhase implements FetchSubPhase { @Override - public void hitExecute(SearchContext context, HitContext hitContext) { + public void hitExecute(SearchContext context, HitContext hitContext) throws IOException { if (context.collapse() != null) { // retrieve the `doc_value` associated with the collapse field String name = context.collapse().getFieldType().name(); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java index 4edde7f9bc6..aa4168bd040 100644 --- 
a/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/InnerHitsContext.java @@ -130,13 +130,9 @@ public final class InnerHitsContext { return new TopDocs(context.searcher().count(q), Lucene.EMPTY_SCORE_DOCS, 0); } else { int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc()); - TopDocsCollector topDocsCollector; + TopDocsCollector topDocsCollector; if (sort() != null) { - try { - topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores()); - } catch (IOException e) { - throw ExceptionsHelper.convertToElastic(e); - } + topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores()); } else { topDocsCollector = TopScoreDocCollector.create(topN); } @@ -148,7 +144,6 @@ public final class InnerHitsContext { return topDocsCollector.topDocs(from(), size()); } } - } public static final class ParentChildInnerHits extends BaseInnerHits { diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java index 00b96867026..0ffef32e427 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/ParentFieldSubFetchPhase.java @@ -62,11 +62,11 @@ public final class ParentFieldSubFetchPhase implements FetchSubPhase { public static String getParentId(ParentFieldMapper fieldMapper, LeafReader reader, int docId) { try { SortedDocValues docValues = reader.getSortedDocValues(fieldMapper.name()); - if (docValues == null) { + if (docValues == null || docValues.advanceExact(docId) == false) { // hit has no _parent field. return null; } - BytesRef parentId = docValues.get(docId); + BytesRef parentId = docValues.binaryValue(); return parentId.length > 0 ? 
parentId.utf8ToString() : null; } catch (IOException e) { throw ExceptionsHelper.convertToElastic(e); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java index 1ce102e364b..5f69230ca42 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/subphase/VersionFetchSubPhase.java @@ -38,8 +38,8 @@ public final class VersionFetchSubPhase implements FetchSubPhase { long version = Versions.NOT_FOUND; try { NumericDocValues versions = hitContext.reader().getNumericDocValues(VersionFieldMapper.NAME); - if (versions != null) { - version = versions.get(hitContext.docId()); + if (versions != null && versions.advanceExact(hitContext.docId())) { + version = versions.longValue(); } } catch (IOException e) { throw new ElasticsearchException("Could not retrieve version", e); diff --git a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java index 8d33140e3ee..d3b1951846c 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java +++ b/core/src/main/java/org/elasticsearch/search/internal/ContextIndexSearcher.java @@ -110,7 +110,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { } @Override - public Weight createWeight(Query query, boolean needsScores) throws IOException { + public Weight createWeight(Query query, boolean needsScores, float boost) throws IOException { if (profiler != null) { // createWeight() is called for each query in the tree, so we tell the queryProfiler // each invocation so that it can build an internal representation of the query @@ -119,7 +119,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { 
profile.startTime(QueryTimingType.CREATE_WEIGHT); final Weight weight; try { - weight = super.createWeight(query, needsScores); + weight = super.createWeight(query, needsScores, boost); } finally { profile.stopAndRecordTime(); profiler.pollLastElement(); @@ -127,7 +127,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable { return new ProfileWeight(query, weight, profile); } else { // needs to be 'super', not 'in' in order to use aggregated DFS - return super.createWeight(query, needsScores); + return super.createWeight(query, needsScores, boost); } } diff --git a/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java b/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java index 6ce060c493e..f02cf14f4ba 100644 --- a/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java +++ b/core/src/main/java/org/elasticsearch/search/lookup/LeafDocLookup.java @@ -19,12 +19,14 @@ package org.elasticsearch.search.lookup; import org.apache.lucene.index.LeafReaderContext; +import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.Nullable; import org.elasticsearch.index.fielddata.IndexFieldDataService; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperService; +import java.io.IOException; import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; @@ -86,7 +88,11 @@ public class LeafDocLookup implements Map> { }); localCacheFieldData.put(fieldName, scriptValues); } - scriptValues.setNextDocId(docId); + try { + scriptValues.setNextDocId(docId); + } catch (IOException e) { + throw ExceptionsHelper.convertToElastic(e); + } return scriptValues; } diff --git a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java index 9ca33f84542..716f68da86f 100644 
--- a/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java +++ b/core/src/main/java/org/elasticsearch/search/profile/query/ProfileWeight.java @@ -79,16 +79,6 @@ public final class ProfileWeight extends Weight { return subQueryWeight.explain(context, doc); } - @Override - public float getValueForNormalization() throws IOException { - return subQueryWeight.getValueForNormalization(); - } - - @Override - public void normalize(float norm, float topLevelBoost) { - subQueryWeight.normalize(norm, topLevelBoost); - } - @Override public void extractTerms(Set set) { subQueryWeight.extractTerms(set); diff --git a/core/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java b/core/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java index 95cafacedde..796bcb2dac8 100644 --- a/core/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java +++ b/core/src/main/java/org/elasticsearch/search/slice/DocValuesSliceQuery.java @@ -23,10 +23,13 @@ import com.carrotsearch.hppc.BitMixer; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.search.ConstantScoreScorer; +import org.apache.lucene.search.ConstantScoreWeight; +import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.TwoPhaseIterator; import org.apache.lucene.search.Weight; -import org.apache.lucene.search.RandomAccessWeight; -import org.apache.lucene.util.Bits; import java.io.IOException; @@ -42,27 +45,38 @@ public final class DocValuesSliceQuery extends SliceQuery { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new RandomAccessWeight(this) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + return new 
ConstantScoreWeight(this, boost) { + @Override - protected Bits getMatchingDocs(final LeafReaderContext context) throws IOException { + public Scorer scorer(LeafReaderContext context) throws IOException { final SortedNumericDocValues values = DocValues.getSortedNumeric(context.reader(), getField()); - return new Bits() { + final DocIdSetIterator approximation = DocIdSetIterator.all(context.reader().maxDoc()); + final TwoPhaseIterator twoPhase = new TwoPhaseIterator(approximation) { + @Override - public boolean get(int doc) { - values.setDocument(doc); - for (int i = 0; i < values.count(); i++) { - return contains(BitMixer.mix(values.valueAt(i))); + public boolean matches() throws IOException { + if (values.advanceExact(approximation.docID())) { + for (int i = 0; i < values.docValueCount(); i++) { + if (contains(BitMixer.mix(values.nextValue()))) { + return true; + } + } + return false; + } else { + return contains(0); } - return contains(0); } @Override - public int length() { - return context.reader().maxDoc(); + public float matchCost() { + // BitMixer.mix seems to be about 10 ops + return 10; } }; + return new ConstantScoreScorer(this, score(), twoPhase); } + }; } } diff --git a/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java b/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java index ddc02d32e55..bb6908c99fd 100644 --- a/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java +++ b/core/src/main/java/org/elasticsearch/search/slice/TermsSliceQuery.java @@ -55,8 +55,8 @@ public final class TermsSliceQuery extends SliceQuery { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - return new ConstantScoreWeight(this) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + return new ConstantScoreWeight(this, boost) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { final 
DocIdSet disi = build(context.reader()); diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java index ec4e19d1eea..2901e05f051 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortBuilder.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.fielddata.AbstractBinaryDocValues; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; @@ -261,16 +262,20 @@ public class ScriptSortBuilder extends SortBuilder { @Override protected SortedBinaryDocValues getValues(LeafReaderContext context) throws IOException { leafScript = searchScript.getLeafSearchScript(context); - final BinaryDocValues values = new BinaryDocValues() { + final BinaryDocValues values = new AbstractBinaryDocValues() { final BytesRefBuilder spare = new BytesRefBuilder(); @Override - public BytesRef get(int docID) { - leafScript.setDocument(docID); + public boolean advanceExact(int doc) throws IOException { + leafScript.setDocument(doc); + return true; + } + @Override + public BytesRef binaryValue() { spare.copyChars(leafScript.run().toString()); return spare.get(); } }; - return FieldData.singleton(values, null); + return FieldData.singleton(values); } @Override protected void setScorer(Scorer scorer) { @@ -286,12 +291,16 @@ public class ScriptSortBuilder extends SortBuilder { leafScript = searchScript.getLeafSearchScript(context); final NumericDoubleValues values = new NumericDoubleValues() { @Override - public double get(int docID) 
{ - leafScript.setDocument(docID); + public boolean advanceExact(int doc) throws IOException { + leafScript.setDocument(doc); + return false; + } + @Override + public double doubleValue() { return leafScript.runAsDouble(); } }; - return FieldData.singleton(values, null); + return FieldData.singleton(values); } @Override protected void setScorer(Scorer scorer) { diff --git a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java index abc8d209cd3..0b127b2eeef 100644 --- a/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java +++ b/core/src/main/java/org/elasticsearch/search/suggest/completion/CompletionSuggester.java @@ -83,7 +83,7 @@ public class CompletionSuggester extends Suggester private static void suggest(IndexSearcher searcher, CompletionQuery query, TopSuggestDocsCollector collector) throws IOException { query = (CompletionQuery) query.rewrite(searcher.getIndexReader()); - Weight weight = query.createWeight(searcher, collector.needsScores()); + Weight weight = query.createWeight(searcher, collector.needsScores(), 1f); for (LeafReaderContext context : searcher.getIndexReader().leaves()) { BulkScorer scorer = weight.bulkScorer(context); if (scorer != null) { @@ -185,7 +185,7 @@ public class CompletionSuggester extends Suggester private final Map scoreDocMap; // TODO: expose dup removal - + TopDocumentsCollector(int num) { super(1, false); // TODO hack, we don't use the underlying pq, so we allocate a size of 1 this.num = num; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy index 5c9c56de182..6d28944680e 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/security.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/security.policy @@ -31,7 +31,7 @@ grant codeBase 
"${codebase.securesm-1.1.jar}" { //// Very special jar permissions: //// These are dangerous permissions that we don't want to grant to everything. -grant codeBase "${codebase.lucene-core-6.5.0.jar}" { +grant codeBase "${codebase.lucene-core-7.0.0-snapshot-89f6d17.jar}" { // needed to allow MMapDirectory's "unmap hack" (die unmap hack, die) // java 8 package permission java.lang.RuntimePermission "accessClassInPackage.sun.misc"; @@ -42,7 +42,7 @@ grant codeBase "${codebase.lucene-core-6.5.0.jar}" { permission java.lang.RuntimePermission "accessDeclaredMembers"; }; -grant codeBase "${codebase.lucene-misc-6.5.0.jar}" { +grant codeBase "${codebase.lucene-misc-7.0.0-snapshot-89f6d17.jar}" { // needed to allow shard shrinking to use hard-links if possible via lucenes HardlinkCopyDirectoryWrapper permission java.nio.file.LinkPermission "hard"; }; diff --git a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy index 2c39ccb350e..9a089ef2810 100644 --- a/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy +++ b/core/src/main/resources/org/elasticsearch/bootstrap/test-framework.policy @@ -33,7 +33,7 @@ grant codeBase "${codebase.securemock-1.2.jar}" { permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; }; -grant codeBase "${codebase.lucene-test-framework-6.5.0.jar}" { +grant codeBase "${codebase.lucene-test-framework-7.0.0-snapshot-89f6d17.jar}" { // needed by RamUsageTester permission java.lang.reflect.ReflectPermission "suppressAccessChecks"; // needed for testing hardlinks in StoreRecoveryTests since we install MockFS diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index 3c727ddf6e3..b0477169567 100644 --- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ 
b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -85,7 +85,7 @@ public class BlendedTermQueryTests extends ESTestCase { { Term[] terms = new Term[]{new Term("firstname", "simon"), new Term("surname", "simon")}; - BlendedTermQuery query = BlendedTermQuery.booleanBlendedQuery(terms, true); + BlendedTermQuery query = BlendedTermQuery.booleanBlendedQuery(terms); TopDocs search = searcher.search(query, 3); ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(3, scoreDocs.length); @@ -93,7 +93,6 @@ public class BlendedTermQueryTests extends ESTestCase { } { BooleanQuery.Builder query = new BooleanQuery.Builder(); - query.setDisableCoord(true); query.add(new TermQuery(new Term("firstname", "simon")), BooleanClause.Occur.SHOULD); query.add(new TermQuery(new Term("surname", "simon")), BooleanClause.Occur.SHOULD); TopDocs search = searcher.search(query.build(), 1); @@ -146,7 +145,6 @@ public class BlendedTermQueryTests extends ESTestCase { { String[] fields = new String[]{"username", "song"}; BooleanQuery.Builder query = new BooleanQuery.Builder(); - query.setDisableCoord(true); query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f), BooleanClause.Occur.SHOULD); query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "fighters"), 0.1f), BooleanClause.Occur.SHOULD); query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); @@ -156,7 +154,6 @@ public class BlendedTermQueryTests extends ESTestCase { } { BooleanQuery.Builder query = new BooleanQuery.Builder(); - query.setDisableCoord(true); DisjunctionMaxQuery uname = new DisjunctionMaxQuery( Arrays.asList(new TermQuery(new Term("username", "foo")), new TermQuery(new Term("song", "foo"))), 0.0f); @@ -186,13 +183,12 @@ public class BlendedTermQueryTests extends ESTestCase { } String term = TestUtil.randomRealisticUnicodeString(random(), 1, 10); Term[] terms = toTerms(fields, term); - boolean disableCoord = 
random().nextBoolean(); boolean useBoolean = random().nextBoolean(); float tieBreaker = random().nextFloat(); - BlendedTermQuery query = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms, disableCoord) : BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); + BlendedTermQuery query = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms) : BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); QueryUtils.check(query); terms = toTerms(fields, term); - BlendedTermQuery query2 = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms, disableCoord) : BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); + BlendedTermQuery query2 = useBoolean ? BlendedTermQuery.booleanBlendedQuery(terms) : BlendedTermQuery.dismaxBlendedQuery(terms, tieBreaker); assertEquals(query, query2); } } @@ -222,7 +218,7 @@ public class BlendedTermQueryTests extends ESTestCase { } BlendedTermQuery blendedTermQuery = random().nextBoolean() ? BlendedTermQuery.dismaxBlendedQuery(terms.toArray(new Term[0]), random().nextFloat()) : - BlendedTermQuery.booleanBlendedQuery(terms.toArray(new Term[0]), random().nextBoolean()); + BlendedTermQuery.booleanBlendedQuery(terms.toArray(new Term[0])); Set extracted = new HashSet<>(); IndexSearcher searcher = new IndexSearcher(new MultiReader()); searcher.createNormalizedWeight(blendedTermQuery, false).extractTerms(extracted); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java index 33a0b855a7a..a1132647c7e 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/LuceneTests.java @@ -28,7 +28,6 @@ import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; 
import org.apache.lucene.index.NoDeletionPolicy; import org.apache.lucene.index.NoMergePolicy; @@ -375,7 +374,7 @@ public class LuceneTests extends ESTestCase { try (DirectoryReader reader = DirectoryReader.open(w)) { IndexSearcher searcher = newSearcher(reader); - Weight termWeight = new TermQuery(new Term("foo", "bar")).createWeight(searcher, false); + Weight termWeight = new TermQuery(new Term("foo", "bar")).createWeight(searcher, false, 1f); assertEquals(1, reader.leaves().size()); LeafReaderContext leafReaderContext = searcher.getIndexReader().leaves().get(0); Bits bits = Lucene.asSequentialAccessBits(leafReaderContext.reader().maxDoc(), termWeight.scorer(leafReaderContext)); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java index 5cba06741a9..ca24a1346fe 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/ShardCoreKeyMapTests.java @@ -112,7 +112,7 @@ public class ShardCoreKeyMapTests extends ESTestCase { assertEquals(3, map.size()); for (LeafReaderContext ctx : reader2.leaves()) { - assertEquals(shardId2, map.getShardId(ctx.reader().getCoreCacheKey())); + assertEquals(shardId2, map.getShardId(ctx.reader().getCoreCacheHelper().getKey())); } w1.addDocument(new Document()); @@ -136,7 +136,7 @@ public class ShardCoreKeyMapTests extends ESTestCase { final Set index1Keys = new HashSet<>(); for (DirectoryReader reader : Arrays.asList(reader1, reader2)) { for (LeafReaderContext ctx : reader.leaves()) { - index1Keys.add(ctx.reader().getCoreCacheKey()); + index1Keys.add(ctx.reader().getCoreCacheHelper().getKey()); } } index1Keys.removeAll(map.getCoreKeysForIndex("index1")); diff --git a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java 
index 817dabfece3..397dd284604 100644 --- a/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java +++ b/core/src/test/java/org/elasticsearch/common/lucene/index/ESDirectoryReaderTests.java @@ -29,7 +29,6 @@ import org.apache.lucene.index.NoMergePolicy; import org.apache.lucene.index.Term; import org.apache.lucene.store.Directory; import org.apache.lucene.util.IOUtils; -import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -66,7 +65,7 @@ public class ESDirectoryReaderTests extends ESTestCase { // we should have the same cache key as before assertEquals(1, ir2.numDocs()); assertEquals(1, ir2.leaves().size()); - assertSame(ir.leaves().get(0).reader().getCoreCacheKey(), ir2.leaves().get(0).reader().getCoreCacheKey()); + assertSame(ir.leaves().get(0).reader().getCoreCacheHelper().getKey(), ir2.leaves().get(0).reader().getCoreCacheHelper().getKey()); IOUtils.close(ir, ir2, iw, dir); } } diff --git a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java index 2b08213ab43..6b117fc7e96 100644 --- a/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java +++ b/core/src/test/java/org/elasticsearch/deps/lucene/SimpleLuceneTests.java @@ -21,24 +21,19 @@ package org.elasticsearch.deps.lucene; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; -import org.apache.lucene.document.FieldType; -import org.apache.lucene.document.LegacyIntField; +import org.apache.lucene.document.IntPoint; import org.apache.lucene.document.SortedDocValuesField; +import org.apache.lucene.document.StoredField; import org.apache.lucene.document.TextField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexReader; import 
org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.LeafReader; -import org.apache.lucene.index.PostingsEnum; -import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.StoredFieldVisitor; import org.apache.lucene.index.Term; -import org.apache.lucene.index.Terms; -import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.FieldDoc; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; @@ -50,8 +45,6 @@ import org.apache.lucene.search.TopFieldDocs; import org.apache.lucene.store.Directory; import org.apache.lucene.store.RAMDirectory; import org.apache.lucene.util.BytesRef; -import org.apache.lucene.util.BytesRefBuilder; -import org.apache.lucene.util.LegacyNumericUtils; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.test.ESTestCase; @@ -71,7 +64,7 @@ public class SimpleLuceneTests extends ESTestCase { document.add(new SortedDocValuesField("str", new BytesRef(text))); indexWriter.addDocument(document); } - IndexReader reader = SlowCompositeReaderWrapper.wrap(DirectoryReader.open(indexWriter)); + IndexReader reader = DirectoryReader.open(indexWriter); IndexSearcher searcher = new IndexSearcher(reader); TopFieldDocs docs = searcher.search(new MatchAllDocsQuery(), 10, new Sort(new SortField("str", SortField.Type.STRING))); for (int i = 0; i < 10; i++) { @@ -86,7 +79,8 @@ public class SimpleLuceneTests extends ESTestCase { Document document = new Document(); document.add(new TextField("_id", "1", Field.Store.YES)); - document.add(new LegacyIntField("test", 2, LegacyIntField.TYPE_STORED)); + document.add(new IntPoint("test", 2)); + document.add(new StoredField("test", 2)); indexWriter.addDocument(document); IndexReader reader = DirectoryReader.open(indexWriter); @@ -94,11 +88,9 @@ public class 
SimpleLuceneTests extends ESTestCase { TopDocs topDocs = searcher.search(new TermQuery(new Term("_id", "1")), 1); Document doc = searcher.doc(topDocs.scoreDocs[0].doc); IndexableField f = doc.getField("test"); - assertThat(f.stringValue(), equalTo("2")); + assertThat(f.numericValue(), equalTo(2)); - BytesRefBuilder bytes = new BytesRefBuilder(); - LegacyNumericUtils.intToPrefixCoded(2, 0, bytes); - topDocs = searcher.search(new TermQuery(new Term("test", bytes.get())), 1); + topDocs = searcher.search(IntPoint.newExactQuery("test", 2), 1); doc = searcher.doc(topDocs.scoreDocs[0].doc); f = doc.getField("test"); assertThat(f.stringValue(), equalTo("2")); @@ -139,40 +131,6 @@ public class SimpleLuceneTests extends ESTestCase { indexWriter.close(); } - public void testBoost() throws Exception { - Directory dir = new RAMDirectory(); - IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - - for (int i = 0; i < 100; i++) { - // TODO (just setting the boost value does not seem to work...) 
- StringBuilder value = new StringBuilder().append("value"); - for (int j = 0; j < i; j++) { - value.append(" ").append("value"); - } - Document document = new Document(); - TextField textField = new TextField("_id", Integer.toString(i), Field.Store.YES); - textField.setBoost(i); - document.add(textField); - textField = new TextField("value", value.toString(), Field.Store.YES); - textField.setBoost(i); - document.add(textField); - indexWriter.addDocument(document); - } - - IndexReader reader = DirectoryReader.open(indexWriter); - IndexSearcher searcher = new IndexSearcher(reader); - TermQuery query = new TermQuery(new Term("value", "value")); - TopDocs topDocs = searcher.search(query, 100); - assertThat(100, equalTo(topDocs.totalHits)); - for (int i = 0; i < topDocs.scoreDocs.length; i++) { - Document doc = searcher.doc(topDocs.scoreDocs[i].doc); -// System.out.println(doc.get("id") + ": " + searcher.explain(query, topDocs.scoreDocs[i].doc)); - assertThat(doc.get("_id"), equalTo(Integer.toString(100 - i - 1))); - } - - indexWriter.close(); - } - public void testNRTSearchOnClosedWriter() throws Exception { Directory dir = new RAMDirectory(); IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); @@ -181,7 +139,6 @@ public class SimpleLuceneTests extends ESTestCase { for (int i = 0; i < 100; i++) { Document document = new Document(); TextField field = new TextField("_id", Integer.toString(i), Field.Store.YES); - field.setBoost(i); document.add(field); indexWriter.addDocument(document); } @@ -189,60 +146,9 @@ public class SimpleLuceneTests extends ESTestCase { indexWriter.close(); - TermsEnum termDocs = SlowCompositeReaderWrapper.wrap(reader).terms("_id").iterator(); - termDocs.next(); - } - - /** - * A test just to verify that term freqs are not stored for numeric fields. int1 is not storing termFreq - * and int2 does. 
- */ - public void testNumericTermDocsFreqs() throws Exception { - Directory dir = new RAMDirectory(); - IndexWriter indexWriter = new IndexWriter(dir, new IndexWriterConfig(Lucene.STANDARD_ANALYZER)); - - Document doc = new Document(); - FieldType type = LegacyIntField.TYPE_NOT_STORED; - LegacyIntField field = new LegacyIntField("int1", 1, type); - doc.add(field); - - type = new FieldType(LegacyIntField.TYPE_NOT_STORED); - type.setIndexOptions(IndexOptions.DOCS_AND_FREQS); - type.freeze(); - - field = new LegacyIntField("int1", 1, type); - doc.add(field); - - field = new LegacyIntField("int2", 1, type); - doc.add(field); - - field = new LegacyIntField("int2", 1, type); - doc.add(field); - - indexWriter.addDocument(doc); - - IndexReader reader = DirectoryReader.open(indexWriter); - LeafReader atomicReader = SlowCompositeReaderWrapper.wrap(reader); - - Terms terms = atomicReader.terms("int1"); - TermsEnum termsEnum = terms.iterator(); - termsEnum.next(); - - PostingsEnum termDocs = termsEnum.postings(null); - assertThat(termDocs.nextDoc(), equalTo(0)); - assertThat(termDocs.docID(), equalTo(0)); - assertThat(termDocs.freq(), equalTo(1)); - - terms = atomicReader.terms("int2"); - termsEnum = terms.iterator(); - termsEnum.next(); - termDocs = termsEnum.postings(termDocs); - assertThat(termDocs.nextDoc(), equalTo(0)); - assertThat(termDocs.docID(), equalTo(0)); - assertThat(termDocs.freq(), equalTo(2)); - - reader.close(); - indexWriter.close(); + for (LeafReaderContext leaf : reader.leaves()) { + leaf.reader().terms("_id").iterator().next(); + } } private DirectoryReader refreshReader(DirectoryReader reader) throws IOException { diff --git a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java index 106dc9274da..6f80da2d639 100644 --- a/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java +++ b/core/src/test/java/org/elasticsearch/index/IndexModuleTests.java @@ -431,8 +431,8 @@ 
public class IndexModuleTests extends ESTestCase { } @Override - public SimWeight computeWeight(CollectionStatistics collectionStats, TermStatistics... termStats) { - return delegate.computeWeight(collectionStats, termStats); + public SimWeight computeWeight(float boost, CollectionStatistics collectionStats, TermStatistics... termStats) { + return delegate.computeWeight(boost, collectionStats, termStats); } @Override diff --git a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java index e49e21bd2ff..57dfd19185d 100644 --- a/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java +++ b/core/src/test/java/org/elasticsearch/index/codec/CodecTests.java @@ -20,12 +20,10 @@ package org.elasticsearch.index.codec; import org.apache.lucene.codecs.Codec; -import org.apache.lucene.codecs.lucene50.Lucene50Codec; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat; import org.apache.lucene.codecs.lucene50.Lucene50StoredFieldsFormat.Mode; -import org.apache.lucene.codecs.lucene53.Lucene53Codec; -import org.apache.lucene.codecs.lucene54.Lucene54Codec; import org.apache.lucene.codecs.lucene62.Lucene62Codec; +import org.apache.lucene.codecs.lucene70.Lucene70Codec; import org.apache.lucene.document.Document; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; @@ -55,10 +53,8 @@ public class CodecTests extends ESTestCase { public void testResolveDefaultCodecs() throws Exception { CodecService codecService = createCodecService(); assertThat(codecService.codec("default"), instanceOf(PerFieldMappingPostingFormatCodec.class)); - assertThat(codecService.codec("default"), instanceOf(Lucene62Codec.class)); - assertThat(codecService.codec("Lucene54"), instanceOf(Lucene54Codec.class)); - assertThat(codecService.codec("Lucene53"), instanceOf(Lucene53Codec.class)); - assertThat(codecService.codec("Lucene50"), instanceOf(Lucene50Codec.class)); + 
assertThat(codecService.codec("default"), instanceOf(Lucene70Codec.class)); + assertThat(codecService.codec("Lucene62"), instanceOf(Lucene62Codec.class)); } public void testDefault() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 30d22dfb731..3f9965c0662 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -2171,7 +2171,10 @@ public class InternalEngineTests extends ESTestCase { final Bits bits = leaf.getLiveDocs(); for (int docID = 0; docID < leaf.maxDoc(); docID++) { if (bits == null || bits.get(docID)) { - final long seqNo = values.get(docID); + if (values.advanceExact(docID) == false) { + throw new AssertionError("Document does not have a seq number: " + docID); + } + final long seqNo = values.longValue(); assertFalse("should not have more than one document with the same seq_no[" + seqNo + "]", bitSet.get((int) seqNo)); bitSet.set((int) seqNo); } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index c96292d90e5..c22114e28aa 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -31,6 +31,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; import org.elasticsearch.search.MultiValueMode; +import java.io.IOException; import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -77,8 +78,8 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes AtomicFieldData fieldData = 
indexFieldData.load(readerContext); SortedBinaryDocValues values = fieldData.getBytesValues(); for (int i = 0; i < readerContext.reader().maxDoc(); ++i) { - values.setDocument(i); - assertThat(values.count(), greaterThanOrEqualTo(1)); + assertTrue(values.advanceExact(i)); + assertThat(values.docValueCount(), greaterThanOrEqualTo(1)); } } } @@ -93,16 +94,17 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes SortedBinaryDocValues bytesValues = fieldData.getBytesValues(); - bytesValues.setDocument(0); - assertThat(bytesValues.count(), equalTo(1)); - assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(two()))); - bytesValues.setDocument(1); - assertThat(bytesValues.count(), equalTo(1)); - assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(one()))); - bytesValues.setDocument(2); - assertThat(bytesValues.count(), equalTo(1)); - assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(three()))); + assertTrue(bytesValues.advanceExact(0)); + assertThat(bytesValues.docValueCount(), equalTo(1)); + assertThat(bytesValues.nextValue(), equalTo(new BytesRef(two()))); + assertTrue(bytesValues.advanceExact(1)); + assertThat(bytesValues.docValueCount(), equalTo(1)); + assertThat(bytesValues.nextValue(), equalTo(new BytesRef(one()))); + assertTrue(bytesValues.advanceExact(2)); + assertThat(bytesValues.docValueCount(), equalTo(1)); + assertThat(bytesValues.nextValue(), equalTo(new BytesRef(three()))); + bytesValues = fieldData.getBytesValues(); assertValues(bytesValues, 0, two()); assertValues(bytesValues, 1, one()); assertValues(bytesValues, 2, three()); @@ -132,19 +134,23 @@ public abstract class AbstractFieldDataImplTestCase extends AbstractFieldDataTes protected abstract void fillSingleValueWithMissing() throws Exception; - public void assertValues(SortedBinaryDocValues values, int docId, BytesRef... 
actualValues) { - values.setDocument(docId); - assertThat(values.count(), equalTo(actualValues.length)); - for (int i = 0; i < actualValues.length; i++) { - assertThat(values.valueAt(i), equalTo(actualValues[i])); + public void assertValues(SortedBinaryDocValues values, int docId, BytesRef... actualValues) throws IOException { + assertEquals(actualValues.length > 0, values.advanceExact(docId)); + if (actualValues.length > 0) { + assertThat(values.docValueCount(), equalTo(actualValues.length)); + for (int i = 0; i < actualValues.length; i++) { + assertThat(values.nextValue(), equalTo(actualValues[i])); + } } } - public void assertValues(SortedBinaryDocValues values, int docId, String... actualValues) { - values.setDocument(docId); - assertThat(values.count(), equalTo(actualValues.length)); - for (int i = 0; i < actualValues.length; i++) { - assertThat(values.valueAt(i), equalTo(new BytesRef(actualValues[i]))); + public void assertValues(SortedBinaryDocValues values, int docId, String... actualValues) throws IOException { + assertEquals(actualValues.length > 0, values.advanceExact(docId)); + if (actualValues.length > 0) { + assertThat(values.docValueCount(), equalTo(actualValues.length)); + for (int i = 0; i < actualValues.length; i++) { + assertThat(values.nextValue(), equalTo(new BytesRef(actualValues[i]))); + } } } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java index 21d816e83ea..a46fd68a291 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractGeoFieldDataTestCase.java @@ -25,10 +25,10 @@ import org.apache.lucene.document.StringField; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; +import java.io.IOException; + import static 
org.elasticsearch.test.geo.RandomShapeGenerator.randomPoint; import static org.hamcrest.Matchers.allOf; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.lessThanOrEqualTo; @@ -71,23 +71,20 @@ public abstract class AbstractGeoFieldDataTestCase extends AbstractFieldDataImpl assumeFalse("Only test on non geo_point fields", getFieldDataType().equals("geo_point")); } - protected void assertValues(MultiGeoPointValues values, int docId) { + protected void assertValues(MultiGeoPointValues values, int docId) throws IOException { assertValues(values, docId, false); } - protected void assertMissing(MultiGeoPointValues values, int docId) { + protected void assertMissing(MultiGeoPointValues values, int docId) throws IOException { assertValues(values, docId, true); } - private void assertValues(MultiGeoPointValues values, int docId, boolean missing) { - values.setDocument(docId); - int docCount = values.count(); - if (missing) { - assertThat(docCount, equalTo(0)); - } else { - assertThat(docCount, greaterThan(0)); + private void assertValues(MultiGeoPointValues values, int docId, boolean missing) throws IOException { + assertEquals(missing == false, values.advanceExact(docId)); + if (missing == false) { + final int docCount = values.docValueCount(); for (int i = 0; i < docCount; ++i) { - final GeoPoint point = values.valueAt(i); + final GeoPoint point = values.nextValue(); assertThat(point.lat(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LAT), lessThanOrEqualTo(GeoUtils.MAX_LAT))); assertThat(point.lon(), allOf(greaterThanOrEqualTo(GeoUtils.MIN_LON), lessThanOrEqualTo(GeoUtils.MAX_LON))); } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index c2416278878..aebb0a802ed 100644 --- 
a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.fielddata; import com.carrotsearch.randomizedtesting.generators.RandomPicks; + import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.Field.Store; @@ -27,7 +28,7 @@ import org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.Term; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.ConstantScoreQuery; @@ -48,8 +49,6 @@ import org.apache.lucene.util.TestUtil; import org.apache.lucene.util.UnicodeUtil; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource; import org.elasticsearch.index.fielddata.IndexFieldData.XFieldComparatorSource.Nested; import org.elasticsearch.index.fielddata.fieldcomparator.BytesRefFieldComparatorSource; import org.elasticsearch.index.fielddata.ordinals.GlobalOrdinalsIndexFieldData; @@ -453,19 +452,6 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI searcher.getIndexReader().close(); } - private void assertIteratorConsistentWithRandomAccess(RandomAccessOrds ords, int maxDoc) { - for (int doc = 0; doc < maxDoc; ++doc) { - ords.setDocument(doc); - final int cardinality = ords.cardinality(); - for (int i = 0; i < cardinality; ++i) { - assertEquals(ords.nextOrd(), ords.ordAt(i)); - } - for (int i = 0; i < 3; ++i) { - 
assertEquals(ords.nextOrd(), -1); - } - } - } - public void testGlobalOrdinals() throws Exception { fillExtendedMvSet(); refreshReader(); @@ -477,31 +463,29 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI assertThat(globalOrdinals, instanceOf(GlobalOrdinalsIndexFieldData.class)); LeafReaderContext leaf = topLevelReader.leaves().get(0); AtomicOrdinalsFieldData afd = globalOrdinals.load(leaf); - RandomAccessOrds values = afd.getOrdinalsValues(); - assertIteratorConsistentWithRandomAccess(values, leaf.reader().maxDoc()); - values.setDocument(0); - assertThat(values.cardinality(), equalTo(2)); + SortedSetDocValues values = afd.getOrdinalsValues(); + assertTrue(values.advanceExact(0)); long ord = values.nextOrd(); assertThat(ord, equalTo(3L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("02")); ord = values.nextOrd(); assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); - values.setDocument(1); - assertThat(values.cardinality(), equalTo(0)); - values.setDocument(2); - assertThat(values.cardinality(), equalTo(1)); + ord = values.nextOrd(); + assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); + assertFalse(values.advanceExact(1)); + assertTrue(values.advanceExact(2)); ord = values.nextOrd(); assertThat(ord, equalTo(4L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("03")); + ord = values.nextOrd(); + assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Second segment leaf = topLevelReader.leaves().get(1); afd = globalOrdinals.load(leaf); values = afd.getOrdinalsValues(); - assertIteratorConsistentWithRandomAccess(values, leaf.reader().maxDoc()); - values.setDocument(0); - assertThat(values.cardinality(), equalTo(3)); + assertTrue(values.advanceExact(0)); ord = values.nextOrd(); assertThat(ord, equalTo(5L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("04")); @@ -511,8 +495,9 @@ public abstract class AbstractStringFieldDataTestCase extends 
AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); - values.setDocument(1); - assertThat(values.cardinality(), equalTo(3)); + ord = values.nextOrd(); + assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); + assertTrue(values.advanceExact(1)); ord = values.nextOrd(); assertThat(ord, equalTo(7L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("06")); @@ -522,10 +507,10 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); - values.setDocument(2); - assertThat(values.cardinality(), equalTo(0)); - values.setDocument(3); - assertThat(values.cardinality(), equalTo(3)); + ord = values.nextOrd(); + assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); + assertFalse(values.advanceExact(2)); + assertTrue(values.advanceExact(3)); ord = values.nextOrd(); assertThat(ord, equalTo(9L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("08")); @@ -535,15 +520,14 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); assertThat(ord, equalTo(11L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("10")); + ord = values.nextOrd(); + assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); // Third segment leaf = topLevelReader.leaves().get(2); afd = globalOrdinals.load(leaf); values = afd.getOrdinalsValues(); - assertIteratorConsistentWithRandomAccess(values, leaf.reader().maxDoc()); - values.setDocument(0); - values.setDocument(0); - assertThat(values.cardinality(), equalTo(3)); + assertTrue(values.advanceExact(0)); ord = values.nextOrd(); assertThat(ord, equalTo(0L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!08")); @@ -553,6 +537,8 @@ public abstract class AbstractStringFieldDataTestCase extends AbstractFieldDataI ord = values.nextOrd(); 
assertThat(ord, equalTo(2L)); assertThat(values.lookupOrd(ord).utf8ToString(), equalTo("!10")); + ord = values.nextOrd(); + assertThat(ord, equalTo(SortedSetDocValues.NO_MORE_ORDS)); } public void testTermsEnum() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java index 70c1486fe1f..dfb3a3c1b3e 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/BinaryDVFieldDataTests.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.ObjectArrayList; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; @@ -87,23 +86,22 @@ public class BinaryDVFieldDataTests extends AbstractFieldDataTestCase { SortedBinaryDocValues bytesValues = fieldData.getBytesValues(); CollectionUtils.sortAndDedup(bytesList1); - bytesValues.setDocument(0); - assertThat(bytesValues.count(), equalTo(2)); - assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(bytesList1.get(0)))); - assertThat(bytesValues.valueAt(1), equalTo(new BytesRef(bytesList1.get(1)))); + assertTrue(bytesValues.advanceExact(0)); + assertThat(bytesValues.docValueCount(), equalTo(2)); + assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList1.get(0)))); + assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList1.get(1)))); - bytesValues.setDocument(1); - assertThat(bytesValues.count(), equalTo(1)); - assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(bytes1))); + assertTrue(bytesValues.advanceExact(1)); + assertThat(bytesValues.docValueCount(), equalTo(1)); + 
assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytes1))); - bytesValues.setDocument(2); - assertThat(bytesValues.count(), equalTo(0)); + assertFalse(bytesValues.advanceExact(2)); CollectionUtils.sortAndDedup(bytesList2); - bytesValues.setDocument(3); - assertThat(bytesValues.count(), equalTo(2)); - assertThat(bytesValues.valueAt(0), equalTo(new BytesRef(bytesList2.get(0)))); - assertThat(bytesValues.valueAt(1), equalTo(new BytesRef(bytesList2.get(1)))); + assertTrue(bytesValues.advanceExact(3)); + assertThat(bytesValues.docValueCount(), equalTo(2)); + assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList2.get(0)))); + assertThat(bytesValues.nextValue(), equalTo(new BytesRef(bytesList2.get(1)))); } } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataTests.java index 33ddc80868a..6236517dde0 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/FieldDataTests.java @@ -25,83 +25,116 @@ import org.apache.lucene.index.SortedNumericDocValues; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; + public class FieldDataTests extends ESTestCase { - public void testSortableLongBitsToDoubles() { + private static class DummyValues extends AbstractNumericDocValues { + + private final long value; + private int docID = -1; + + DummyValues(long value) { + this.value = value; + } + + @Override + public boolean advanceExact(int target) throws IOException { + docID = target; + return true; + } + + @Override + public int docID() { + return docID; + } + + @Override + public long longValue() throws IOException { + return value; + } + } + + public void testSortableLongBitsToDoubles() throws IOException { final double value = randomDouble(); final long valueBits = NumericUtils.doubleToSortableLong(value); - 
NumericDocValues values = new NumericDocValues() { - @Override - public long get(int docID) { - return valueBits; - } - }; + NumericDocValues values = new DummyValues(valueBits); - SortedNumericDoubleValues asMultiDoubles = FieldData.sortableLongBitsToDoubles(DocValues.singleton(values, null)); + SortedNumericDoubleValues asMultiDoubles = FieldData.sortableLongBitsToDoubles(DocValues.singleton(values)); NumericDoubleValues asDoubles = FieldData.unwrapSingleton(asMultiDoubles); assertNotNull(asDoubles); - assertEquals(value, asDoubles.get(0), 0); + assertTrue(asDoubles.advanceExact(0)); + assertEquals(value, asDoubles.doubleValue(), 0); + values = new DummyValues(valueBits); + asMultiDoubles = FieldData.sortableLongBitsToDoubles(DocValues.singleton(values)); NumericDocValues backToLongs = DocValues.unwrapSingleton(FieldData.toSortableLongBits(asMultiDoubles)); assertSame(values, backToLongs); - SortedNumericDocValues multiValues = new SortedNumericDocValues() { + SortedNumericDocValues multiValues = new AbstractSortedNumericDocValues() { @Override - public long valueAt(int index) { + public boolean advanceExact(int target) throws IOException { + return true; + } + + @Override + public long nextValue() { return valueBits; } @Override - public void setDocument(int doc) { - } - - @Override - public int count() { + public int docValueCount() { return 1; } }; asMultiDoubles = FieldData.sortableLongBitsToDoubles(multiValues); - assertEquals(value, asMultiDoubles.valueAt(0), 0); + assertEquals(value, asMultiDoubles.nextValue(), 0); assertSame(multiValues, FieldData.toSortableLongBits(asMultiDoubles)); } - public void testDoublesToSortableLongBits() { + public void testDoublesToSortableLongBits() throws IOException { final double value = randomDouble(); final long valueBits = NumericUtils.doubleToSortableLong(value); NumericDoubleValues values = new NumericDoubleValues() { @Override - public double get(int docID) { + public boolean advanceExact(int doc) throws IOException { 
+ return true; + } + @Override + public double doubleValue() { return value; } }; - SortedNumericDocValues asMultiLongs = FieldData.toSortableLongBits(FieldData.singleton(values, null)); + SortedNumericDocValues asMultiLongs = FieldData.toSortableLongBits(FieldData.singleton(values)); NumericDocValues asLongs = DocValues.unwrapSingleton(asMultiLongs); assertNotNull(asLongs); - assertEquals(valueBits, asLongs.get(0)); + assertTrue(asLongs.advanceExact(0)); + assertEquals(valueBits, asLongs.longValue()); SortedNumericDoubleValues multiValues = new SortedNumericDoubleValues() { @Override - public double valueAt(int index) { + public double nextValue() { return value; } @Override - public void setDocument(int doc) { + public boolean advanceExact(int target) throws IOException { + return true; } @Override - public int count() { + public int docValueCount() { return 1; } }; asMultiLongs = FieldData.toSortableLongBits(multiValues); - assertEquals(valueBits, asMultiLongs.valueAt(0)); + assertEquals(valueBits, asMultiLongs.nextValue()); assertSame(multiValues, FieldData.sortableLongBitsToDoubles(asMultiLongs)); } } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java index 33ecd57b130..19725aca523 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/FilterFieldDataTests.java @@ -22,11 +22,11 @@ import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.RandomAccessOrds; +import org.apache.lucene.index.SortedSetDocValues; import org.elasticsearch.index.mapper.ContentPath; import org.elasticsearch.index.mapper.MappedFieldType; -import org.elasticsearch.index.mapper.TextFieldMapper; import 
org.elasticsearch.index.mapper.Mapper.BuilderContext; +import org.elasticsearch.index.mapper.TextFieldMapper; import java.util.List; import java.util.Random; @@ -71,7 +71,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase { IndexOrdinalsFieldData fieldData = ifdService.getForField(ft); for (LeafReaderContext context : contexts) { AtomicOrdinalsFieldData loadDirect = fieldData.loadDirect(context); - RandomAccessOrds bytesValues = loadDirect.getOrdinalsValues(); + SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); assertThat(2L, equalTo(bytesValues.getValueCount())); assertThat(bytesValues.lookupOrd(0).utf8ToString(), equalTo("10")); assertThat(bytesValues.lookupOrd(1).utf8ToString(), equalTo("100")); @@ -86,7 +86,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase { IndexOrdinalsFieldData fieldData = ifdService.getForField(ft); for (LeafReaderContext context : contexts) { AtomicOrdinalsFieldData loadDirect = fieldData.loadDirect(context); - RandomAccessOrds bytesValues = loadDirect.getOrdinalsValues(); + SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); assertThat(1L, equalTo(bytesValues.getValueCount())); assertThat(bytesValues.lookupOrd(0).utf8ToString(), equalTo("5")); } @@ -101,7 +101,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase { IndexOrdinalsFieldData fieldData = ifdService.getForField(ft); for (LeafReaderContext context : contexts) { AtomicOrdinalsFieldData loadDirect = fieldData.loadDirect(context); - RandomAccessOrds bytesValues = loadDirect.getOrdinalsValues(); + SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); assertThat(2L, equalTo(bytesValues.getValueCount())); assertThat(bytesValues.lookupOrd(0).utf8ToString(), equalTo("10")); assertThat(bytesValues.lookupOrd(1).utf8ToString(), equalTo("100")); @@ -117,7 +117,7 @@ public class FilterFieldDataTests extends AbstractFieldDataTestCase { IndexOrdinalsFieldData fieldData = 
ifdService.getForField(ft); for (LeafReaderContext context : contexts) { AtomicOrdinalsFieldData loadDirect = fieldData.loadDirect(context); - RandomAccessOrds bytesValues = loadDirect.getOrdinalsValues(); + SortedSetDocValues bytesValues = loadDirect.getOrdinalsValues(); assertThat(2L, equalTo(bytesValues.getValueCount())); assertThat(bytesValues.lookupOrd(0).utf8ToString(), equalTo("10")); assertThat(bytesValues.lookupOrd(1).utf8ToString(), equalTo("100")); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java index f426e5433c6..f7bdc8efed1 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ParentChildFieldDataTests.java @@ -132,40 +132,39 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { AtomicFieldData fieldData = indexFieldData.load(readerContext); SortedBinaryDocValues bytesValues = fieldData.getBytesValues(); - bytesValues.setDocument(0); - assertThat(bytesValues.count(), equalTo(1)); - assertThat(bytesValues.valueAt(0).utf8ToString(), equalTo("1")); + assertTrue(bytesValues.advanceExact(0)); + assertThat(bytesValues.docValueCount(), equalTo(1)); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("1")); - bytesValues.setDocument(1); - assertThat(bytesValues.count(), equalTo(2)); - assertThat(bytesValues.valueAt(0).utf8ToString(), equalTo("1")); - assertThat(bytesValues.valueAt(1).utf8ToString(), equalTo("2")); + assertTrue(bytesValues.advanceExact(1)); + assertThat(bytesValues.docValueCount(), equalTo(2)); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("1")); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("2")); - bytesValues.setDocument(2); - assertThat(bytesValues.count(), equalTo(2)); - assertThat(bytesValues.valueAt(0).utf8ToString(), equalTo("1")); - 
assertThat(bytesValues.valueAt(1).utf8ToString(), equalTo("3")); + assertTrue(bytesValues.advanceExact(2)); + assertThat(bytesValues.docValueCount(), equalTo(2)); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("1")); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("3")); - bytesValues.setDocument(3); - assertThat(bytesValues.count(), equalTo(1)); - assertThat(bytesValues.valueAt(0).utf8ToString(), equalTo("2")); + assertTrue(bytesValues.advanceExact(3)); + assertThat(bytesValues.docValueCount(), equalTo(1)); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("2")); - bytesValues.setDocument(4); - assertThat(bytesValues.count(), equalTo(2)); - assertThat(bytesValues.valueAt(0).utf8ToString(), equalTo("2")); - assertThat(bytesValues.valueAt(1).utf8ToString(), equalTo("4")); + assertTrue(bytesValues.advanceExact(4)); + assertThat(bytesValues.docValueCount(), equalTo(2)); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("2")); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("4")); - bytesValues.setDocument(5); - assertThat(bytesValues.count(), equalTo(2)); - assertThat(bytesValues.valueAt(0).utf8ToString(), equalTo("1")); - assertThat(bytesValues.valueAt(1).utf8ToString(), equalTo("5")); + assertTrue(bytesValues.advanceExact(5)); + assertThat(bytesValues.docValueCount(), equalTo(2)); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("1")); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("5")); - bytesValues.setDocument(6); - assertThat(bytesValues.count(), equalTo(1)); - assertThat(bytesValues.valueAt(0).utf8ToString(), equalTo("2")); + assertTrue(bytesValues.advanceExact(6)); + assertThat(bytesValues.docValueCount(), equalTo(1)); + assertThat(bytesValues.nextValue().utf8ToString(), equalTo("2")); - bytesValues.setDocument(7); - assertThat(bytesValues.count(), equalTo(0)); + assertFalse(bytesValues.advanceExact(7)); } } @@ -236,7 +235,7 @@ public class ParentChildFieldDataTests 
extends AbstractFieldDataTestCase { ids[j] = BytesRef.deepCopyOf(id); } } - expected.put(context.reader().getCoreCacheKey(), ids); + expected.put(context.reader().getCoreCacheHelper().getKey(), ids); } for (int i = 0; i < numThreads; ++i) { @@ -249,7 +248,7 @@ public class ParentChildFieldDataTests extends AbstractFieldDataTestCase { for (LeafReaderContext context : reader.leaves()) { AtomicParentChildFieldData leafData = global.load(context); SortedDocValues parentIds = leafData.getOrdinalsValues(parentType); - final BytesRef[] expectedIds = expected.get(context.reader().getCoreCacheKey()); + final BytesRef[] expectedIds = expected.get(context.reader().getCoreCacheHelper().getKey()); for (int j = 0; j < parentIds.getValueCount(); ++j) { final BytesRef id = parentIds.lookupOrd(j); assertEquals(expectedIds[j], id); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java index f8579efae73..626327d4549 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java @@ -19,15 +19,16 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.index.SortedNumericDocValues; import org.elasticsearch.index.fielddata.ScriptDocValues.Dates; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.ReadableDateTime; +import java.io.IOException; + public class ScriptDocValuesDatesTests extends ESTestCase { - public void test() { + public void test() throws IOException { long[][] values = new long[between(3, 10)][]; ReadableDateTime[][] expectedDates = new ReadableDateTime[values.length][]; for (int d = 0; d < values.length; d++) { @@ -56,20 +57,23 @@ public class ScriptDocValuesDatesTests extends ESTestCase { } private Dates wrap(long[][] values) { - 
return new Dates(new SortedNumericDocValues() { + return new Dates(new AbstractSortedNumericDocValues() { long[] current; + int i; @Override - public void setDocument(int doc) { + public boolean advanceExact(int doc) { current = values[doc]; + i = 0; + return current.length > 0; } @Override - public int count() { + public int docValueCount() { return current.length; } @Override - public long valueAt(int index) { - return current[index]; + public long nextValue() { + return current[i++]; } }); } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java index 466ca0b6991..1f71a808ab5 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesGeoPointsTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoUtils; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.util.Arrays; public class ScriptDocValuesGeoPointsTests extends ESTestCase { @@ -30,22 +31,24 @@ public class ScriptDocValuesGeoPointsTests extends ESTestCase { private static MultiGeoPointValues wrap(final GeoPoint... 
points) { return new MultiGeoPointValues() { int docID = -1; + int i; @Override - public GeoPoint valueAt(int i) { + public GeoPoint nextValue() { if (docID != 0) { fail(); } - return points[i]; + return points[i++]; } @Override - public void setDocument(int docId) { - this.docID = docId; + public boolean advanceExact(int docId) { + docID = docId; + return points.length > 0; } @Override - public int count() { + public int docValueCount() { if (docID != 0) { return 0; } @@ -62,7 +65,7 @@ public class ScriptDocValuesGeoPointsTests extends ESTestCase { return randomDouble() * 360 - 180; } - public void testGeoGetLatLon() { + public void testGeoGetLatLon() throws IOException { final double lat1 = randomLat(); final double lat2 = randomLat(); final double lon1 = randomLon(); @@ -81,7 +84,7 @@ public class ScriptDocValuesGeoPointsTests extends ESTestCase { assertTrue(Arrays.equals(new double[] {lon1, lon2}, script.getLons())); } - public void testGeoDistance() { + public void testGeoDistance() throws IOException { final double lat = randomLat(); final double lon = randomLon(); final MultiGeoPointValues values = wrap(new GeoPoint(lat, lon)); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java index c6ec8ce7494..1b3e8fa2274 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java @@ -19,15 +19,16 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.index.SortedNumericDocValues; import org.elasticsearch.index.fielddata.ScriptDocValues.Longs; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.ReadableDateTime; +import java.io.IOException; + public class ScriptDocValuesLongsTests extends ESTestCase { - public void 
testLongs() { + public void testLongs() throws IOException { long[][] values = new long[between(3, 10)][]; for (int d = 0; d < values.length; d++) { values[d] = new long[randomBoolean() ? randomBoolean() ? 0 : 1 : between(2, 100)]; @@ -54,7 +55,7 @@ public class ScriptDocValuesLongsTests extends ESTestCase { } } - public void testDates() { + public void testDates() throws IOException { long[][] values = new long[between(3, 10)][]; ReadableDateTime[][] dates = new ReadableDateTime[values.length][]; for (int d = 0; d < values.length; d++) { @@ -87,20 +88,23 @@ public class ScriptDocValuesLongsTests extends ESTestCase { } private Longs wrap(long[][] values) { - return new Longs(new SortedNumericDocValues() { + return new Longs(new AbstractSortedNumericDocValues() { long[] current; + int i; @Override - public void setDocument(int doc) { + public boolean advanceExact(int doc) { + i = 0; current = values[doc]; + return current.length > 0; } @Override - public int count() { + public int docValueCount() { return current.length; } @Override - public long valueAt(int index) { - return current[index]; + public long nextValue() { + return current[i++]; } }); } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java index a291311c3bc..65c6335ebbc 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/fieldcomparator/ReplaceMissingTests.java @@ -62,49 +62,68 @@ public class ReplaceMissingTests extends ESTestCase { assertEquals("cat", dv.lookupOrd(0).utf8ToString()); assertEquals("dog", dv.lookupOrd(1).utf8ToString()); - assertEquals(0, dv.getOrd(0)); - assertEquals(0, dv.getOrd(1)); - assertEquals(1, dv.getOrd(2)); + assertTrue(dv.advanceExact(0)); + assertEquals(0, dv.ordValue()); + assertTrue(dv.advanceExact(1)); + 
assertEquals(0, dv.ordValue()); + assertTrue(dv.advanceExact(2)); + assertEquals(1, dv.ordValue()); + raw = ar.getSortedDocValues("field"); dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("dog")); assertEquals(2, dv.getValueCount()); assertEquals("cat", dv.lookupOrd(0).utf8ToString()); assertEquals("dog", dv.lookupOrd(1).utf8ToString()); - assertEquals(0, dv.getOrd(0)); - assertEquals(1, dv.getOrd(1)); - assertEquals(1, dv.getOrd(2)); + assertTrue(dv.advanceExact(0)); + assertEquals(0, dv.ordValue()); + assertTrue(dv.advanceExact(1)); + assertEquals(1, dv.ordValue()); + assertTrue(dv.advanceExact(2)); + assertEquals(1, dv.ordValue()); // non-existing values + raw = ar.getSortedDocValues("field"); dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("apple")); assertEquals(3, dv.getValueCount()); assertEquals("apple", dv.lookupOrd(0).utf8ToString()); assertEquals("cat", dv.lookupOrd(1).utf8ToString()); assertEquals("dog", dv.lookupOrd(2).utf8ToString()); - assertEquals(1, dv.getOrd(0)); - assertEquals(0, dv.getOrd(1)); - assertEquals(2, dv.getOrd(2)); + assertTrue(dv.advanceExact(0)); + assertEquals(1, dv.ordValue()); + assertTrue(dv.advanceExact(1)); + assertEquals(0, dv.ordValue()); + assertTrue(dv.advanceExact(2)); + assertEquals(2, dv.ordValue()); + raw = ar.getSortedDocValues("field"); dv = new BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("company")); assertEquals(3, dv.getValueCount()); assertEquals("cat", dv.lookupOrd(0).utf8ToString()); assertEquals("company", dv.lookupOrd(1).utf8ToString()); assertEquals("dog", dv.lookupOrd(2).utf8ToString()); - assertEquals(0, dv.getOrd(0)); - assertEquals(1, dv.getOrd(1)); - assertEquals(2, dv.getOrd(2)); + assertTrue(dv.advanceExact(0)); + assertEquals(0, dv.ordValue()); + assertTrue(dv.advanceExact(1)); + assertEquals(1, dv.ordValue()); + assertTrue(dv.advanceExact(2)); + assertEquals(2, dv.ordValue()); + raw = ar.getSortedDocValues("field"); dv = new 
BytesRefFieldComparatorSource.ReplaceMissing(raw, new BytesRef("ebay")); assertEquals(3, dv.getValueCount()); assertEquals("cat", dv.lookupOrd(0).utf8ToString()); assertEquals("dog", dv.lookupOrd(1).utf8ToString()); assertEquals("ebay", dv.lookupOrd(2).utf8ToString()); - assertEquals(0, dv.getOrd(0)); - assertEquals(2, dv.getOrd(1)); - assertEquals(1, dv.getOrd(2)); + assertTrue(dv.advanceExact(0)); + assertEquals(0, dv.ordValue()); + assertTrue(dv.advanceExact(1)); + assertEquals(2, dv.ordValue()); + assertTrue(dv.advanceExact(2)); + assertEquals(1, dv.ordValue()); reader.close(); dir.close(); diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java index c594427b7e8..1ae6197c547 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/MultiOrdinalsTests.java @@ -18,8 +18,8 @@ */ package org.elasticsearch.index.fielddata.ordinals; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.packed.PackedInts; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.search.MultiValueMode; @@ -102,7 +102,7 @@ public class MultiOrdinalsTests extends ESTestCase { } }); Ordinals ords = creationMultiOrdinals(builder); - RandomAccessOrds docs = ords.ordinals(); + SortedSetDocValues docs = ords.ordinals(); final SortedDocValues singleOrds = MultiValueMode.MIN.select(docs); int docId = ordsAndIds.get(0).id; List docOrds = new ArrayList<>(); @@ -111,22 +111,18 @@ public class MultiOrdinalsTests extends ESTestCase { docOrds.add(ordAndId.ord); } else { if (!docOrds.isEmpty()) { - assertThat((long) singleOrds.getOrd(docId), equalTo(docOrds.get(0))); + assertTrue(singleOrds.advanceExact(docId)); + 
assertThat((long) singleOrds.ordValue(), equalTo(docOrds.get(0))); - docs.setDocument(docId); - final int numOrds = docs.cardinality(); - assertThat(numOrds, equalTo(docOrds.size())); - for (int i = 0; i < numOrds; i++) { - assertThat(docs.nextOrd(), equalTo(docOrds.get(i))); + assertTrue(docs.advanceExact(docId)); + for (Long ord : docOrds) { + assertThat(docs.nextOrd(), equalTo(ord)); } - final long[] array = new long[docOrds.size()]; - for (int i = 0; i < array.length; i++) { - array[i] = docOrds.get(i); - } - assertIter(docs, docId, array); + assertEquals(SortedSetDocValues.NO_MORE_ORDS, docs.nextOrd()); } for (int i = docId + 1; i < ordAndId.id; i++) { - assertThat((long) singleOrds.getOrd(i), equalTo(RandomAccessOrds.NO_MORE_ORDS)); + assertFalse(singleOrds.advanceExact(i)); + assertFalse(docs.advanceExact(i)); } docId = ordAndId.id; docOrds.clear(); @@ -209,18 +205,10 @@ public class MultiOrdinalsTests extends ESTestCase { }; Ordinals ordinals = creationMultiOrdinals(builder); - RandomAccessOrds docs = ordinals.ordinals(); + SortedSetDocValues docs = ordinals.ordinals(); assertEquals(docs, ordinalPlan); } - protected static void assertIter(RandomAccessOrds docs, int docId, long... 
expectedOrdinals) { - docs.setDocument(docId); - assertThat(docs.cardinality(), equalTo(expectedOrdinals.length)); - for (long expectedOrdinal : expectedOrdinals) { - assertThat(docs.nextOrd(), equalTo(expectedOrdinal)); - } - } - public void testMultiValuesDocsWithOverlappingStorageArrays() throws Exception { int maxDoc = 7; long maxOrds = 15; @@ -261,11 +249,11 @@ public class MultiOrdinalsTests extends ESTestCase { }; Ordinals ordinals = new MultiOrdinals(builder, PackedInts.FASTEST); - RandomAccessOrds docs = ordinals.ordinals(); + SortedSetDocValues docs = ordinals.ordinals(); assertEquals(docs, ordinalPlan); } - private void assertEquals(RandomAccessOrds docs, long[][] ordinalPlan) { + private void assertEquals(SortedSetDocValues docs, long[][] ordinalPlan) throws IOException { long maxOrd = 0; for (int doc = 0; doc < ordinalPlan.length; ++doc) { if (ordinalPlan[doc].length > 0) { @@ -276,10 +264,12 @@ public class MultiOrdinalsTests extends ESTestCase { assertThat(FieldData.isMultiValued(docs), equalTo(true)); for (int doc = 0; doc < ordinalPlan.length; ++doc) { long[] ords = ordinalPlan[doc]; - docs.setDocument(doc); - assertThat(docs.cardinality(), equalTo(ords.length)); - for (int i = 0; i < ords.length; ++i) { - assertThat(docs.ordAt(i), equalTo(ords[i])); + assertEquals(ords.length > 0, docs.advanceExact(doc)); + if (ords.length > 0) { + for (long ord : ords) { + assertThat(docs.nextOrd(), equalTo(ord)); + } + assertThat(docs.nextOrd(), equalTo(SortedSetDocValues.NO_MORE_ORDS)); } } } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/SingleOrdinalsTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/SingleOrdinalsTests.java index 9eb73e93324..b0fbda6a940 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/SingleOrdinalsTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/ordinals/SingleOrdinalsTests.java @@ -19,8 +19,8 @@ package 
org.elasticsearch.index.fielddata.ordinals; import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -48,12 +48,13 @@ public class SingleOrdinalsTests extends ESTestCase { Ordinals ords = builder.build(); assertThat(ords, instanceOf(SinglePackedOrdinals.class)); - RandomAccessOrds docs = ords.ordinals(); + SortedSetDocValues docs = ords.ordinals(); final SortedDocValues singleOrds = DocValues.unwrapSingleton(docs); assertNotNull(singleOrds); for (Map.Entry entry : controlDocToOrdinal.entrySet()) { - assertThat(entry.getValue(), equalTo((long) singleOrds.getOrd(entry.getKey()))); + assertTrue(singleOrds.advanceExact(entry.getKey())); + assertEquals(singleOrds.ordValue(), (long) entry.getValue()); } } diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java b/core/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java index af15114eae7..19972e389b2 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java +++ b/core/src/test/java/org/elasticsearch/index/fielddata/plain/HalfFloatFielddataTests.java @@ -51,9 +51,9 @@ public class HalfFloatFielddataTests extends ESTestCase { SortedNumericDoubleValues values = new SortedNumericDVIndexFieldData.SortedNumericHalfFloatFieldData( reader, "half_float").getDoubleValues(); assertNotNull(FieldData.unwrapSingleton(values)); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(3f, values.valueAt(0), 0f); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(3f, values.nextValue(), 0f); IOUtils.close(dirReader, w, dir); } @@ -73,10 +73,10 @@ public class HalfFloatFielddataTests extends ESTestCase { SortedNumericDoubleValues values = new 
SortedNumericDVIndexFieldData.SortedNumericHalfFloatFieldData( reader, "half_float").getDoubleValues(); assertNull(FieldData.unwrapSingleton(values)); - values.setDocument(0); - assertEquals(2, values.count()); - assertEquals(2f, values.valueAt(0), 0f); - assertEquals(3f, values.valueAt(1), 0f); + assertTrue(values.advanceExact(0)); + assertEquals(2, values.docValueCount()); + assertEquals(2f, values.nextValue(), 0f); + assertEquals(3f, values.nextValue(), 0f); IOUtils.close(dirReader, w, dir); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java index 9c672924317..e6a1c0a69d8 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/BooleanFieldMapperTests.java @@ -97,9 +97,9 @@ public class BooleanFieldMapperTests extends ESSingleNodeTestCase { assertEquals(new BytesRef("T"), leaf.terms("field").iterator().next()); SortedNumericDocValues values = leaf.getSortedNumericDocValues("field"); assertNotNull(values); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(1, values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(1, values.nextValue()); } } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java b/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java index 2969b8392b5..642282c9d5c 100755 --- a/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/FakeStringFieldMapper.java @@ -121,11 +121,6 @@ public class FakeStringFieldMapper extends FieldMapper { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, copyTo); } - @Override - protected boolean customBoost() { - return true; - } - @Override protected void 
parseCreateField(ParseContext context, List fields) throws IOException { String value; diff --git a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java index dd664219867..1945c164fc4 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/ScaledFloatFieldTypeTests.java @@ -195,19 +195,19 @@ public class ScaledFloatFieldTypeTests extends FieldTypeTestCase { assertEquals(fielddata.getNumericType(), IndexNumericFieldData.NumericType.DOUBLE); AtomicNumericFieldData leafFieldData = fielddata.load(reader.leaves().get(0)); SortedNumericDoubleValues values = leafFieldData.getDoubleValues(); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(10/ft.getScalingFactor(), values.valueAt(0), 10e-5); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(10/ft.getScalingFactor(), values.nextValue(), 10e-5); // multi-valued ft.setName("scaled_float2"); fielddata = (IndexNumericFieldData) ft.fielddataBuilder().build(indexSettings, ft, null, null, null); leafFieldData = fielddata.load(reader.leaves().get(0)); values = leafFieldData.getDoubleValues(); - values.setDocument(0); - assertEquals(2, values.count()); - assertEquals(5/ft.getScalingFactor(), values.valueAt(0), 10e-5); - assertEquals(12/ft.getScalingFactor(), values.valueAt(1), 10e-5); + assertTrue(values.advanceExact(0)); + assertEquals(2, values.docValueCount()); + assertEquals(5/ft.getScalingFactor(), values.nextValue(), 10e-5); + assertEquals(12/ft.getScalingFactor(), values.nextValue(), 10e-5); } IOUtils.close(w, dir); } diff --git a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index 339c9a9136e..68d2d369bbe 100644 --- 
a/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -53,9 +53,6 @@ public class BoolQueryBuilderTests extends AbstractQueryTestCase field = fieldsIterator.next(); assertTermOrBoostQuery(booleanClause.getQuery(), field.getKey(), queryBuilder.value(), field.getValue()); } - if (queryBuilder.minimumShouldMatch() != null && !boolQuery.isCoordDisabled()) { + /** + * TODO: + * Test disabled because we cannot check min should match consistently: + * https://github.com/elastic/elasticsearch/issues/23966 + * + if (queryBuilder.minimumShouldMatch() != null && !boolQuery.isCoordDisabled()) { assertThat(boolQuery.getMinimumNumberShouldMatch(), greaterThan(0)); - } + } + * + **/ } else if (queryBuilder.fields().size() == 1) { Map.Entry field = queryBuilder.fields().entrySet().iterator().next(); assertTermOrBoostQuery(query, field.getKey(), queryBuilder.value(), field.getValue()); @@ -331,6 +337,7 @@ public class SimpleQueryStringBuilderTests extends AbstractQueryTestCase { throw new UnsupportedOperationException(); }), new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); @@ -126,7 +126,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { ft2.setBoost(10); Term[] terms = new Term[] { new Term("foo", "baz"), new Term("bar", "baz") }; float[] boosts = new float[] {200, 30}; - Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts, false); + Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }), new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); @@ -145,7 +145,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { ft2.setName("bar"); Term[] terms = new Term[] { new Term("foo", 
"baz") }; float[] boosts = new float[] {2}; - Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts, false); + Query expected = BlendedTermQuery.booleanBlendedQuery(terms, boosts); Query actual = MultiMatchQuery.blendTerm( indexService.newQueryShardContext(randomInt(20), null, () -> { throw new UnsupportedOperationException(); }), new BytesRef("baz"), null, 1f, new FieldAndFieldType(ft1, 2), new FieldAndFieldType(ft2, 3)); @@ -164,9 +164,9 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase { ft2.setName("bar"); Term[] terms = new Term[] { new Term("foo", "baz") }; float[] boosts = new float[] {2}; - Query expectedClause1 = BlendedTermQuery.booleanBlendedQuery(terms, boosts, false); + Query expectedClause1 = BlendedTermQuery.booleanBlendedQuery(terms, boosts); Query expectedClause2 = new BoostQuery(new MatchAllDocsQuery(), 3); - Query expected = new BooleanQuery.Builder().setDisableCoord(true) + Query expected = new BooleanQuery.Builder() .add(expectedClause1, Occur.SHOULD) .add(expectedClause2, Occur.SHOULD) .build(); diff --git a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java index 6b7c77fdf5e..f5d4f048c27 100644 --- a/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/shard/IndexSearcherWrapperTests.java @@ -76,8 +76,8 @@ public class IndexSearcherWrapperTests extends ESTestCase { try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { final Engine.Searcher wrap = wrapper.wrap(engineSearcher); assertEquals(1, wrap.reader().getRefCount()); - ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), reader -> { - if (reader == open) { + ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { + if (key == open.getReaderCacheHelper().getKey()) { count.incrementAndGet(); } 
outerCount.incrementAndGet(); @@ -123,11 +123,11 @@ public class IndexSearcherWrapperTests extends ESTestCase { final ConcurrentHashMap cache = new ConcurrentHashMap<>(); try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { try (Engine.Searcher wrap = wrapper.wrap(engineSearcher)) { - ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), reader -> { - cache.remove(reader.getCoreCacheKey()); + ElasticsearchDirectoryReader.addReaderCloseListener(wrap.getDirectoryReader(), key -> { + cache.remove(key); }); TopDocs search = wrap.searcher().search(new TermQuery(new Term("field", "doc")), 1); - cache.put(wrap.reader().getCoreCacheKey(), search); + cache.put(wrap.reader().getReaderCacheHelper().getKey(), search); } } assertEquals(1, closeCalls.get()); @@ -158,49 +158,6 @@ public class IndexSearcherWrapperTests extends ESTestCase { IOUtils.close(open, writer, dir); } - public void testWrappedReaderMustDelegateCoreCacheKey() throws IOException { - Directory dir = newDirectory(); - IndexWriterConfig iwc = newIndexWriterConfig(); - IndexWriter writer = new IndexWriter(dir, iwc); - Document doc = new Document(); - doc.add(new StringField("id", "1", random().nextBoolean() ? Field.Store.YES : Field.Store.NO)); - doc.add(new TextField("field", "doc", random().nextBoolean() ? 
Field.Store.YES : Field.Store.NO)); - writer.addDocument(doc); - DirectoryReader open = ElasticsearchDirectoryReader.wrap(DirectoryReader.open(writer), new ShardId("foo", "_na_", 1)); - IndexSearcher searcher = new IndexSearcher(open); - assertEquals(1, searcher.search(new TermQuery(new Term("field", "doc")), 1).totalHits); - searcher.setSimilarity(iwc.getSimilarity()); - IndexSearcherWrapper wrapper = new IndexSearcherWrapper() { - @Override - protected DirectoryReader wrap(DirectoryReader reader) throws IOException { - return new BrokenWrapper(reader, false); - } - }; - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { - try { - wrapper.wrap(engineSearcher); - fail("reader must delegate cache key"); - } catch (IllegalStateException ex) { - // all is well - } - } - wrapper = new IndexSearcherWrapper() { - @Override - protected DirectoryReader wrap(DirectoryReader reader) throws IOException { - return new BrokenWrapper(reader, true); - } - }; - try (Engine.Searcher engineSearcher = new Engine.Searcher("foo", searcher)) { - try { - wrapper.wrap(engineSearcher); - fail("reader must delegate cache key"); - } catch (IllegalStateException ex) { - // all is well - } - } - IOUtils.close(open, writer, dir); - } - private static class FieldMaskingReader extends FilterDirectoryReader { private final String field; private final AtomicInteger closeCalls; @@ -222,8 +179,8 @@ public class IndexSearcherWrapperTests extends ESTestCase { } @Override - public Object getCoreCacheKey() { - return in.getCoreCacheKey(); + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); } @Override @@ -233,44 +190,4 @@ public class IndexSearcherWrapperTests extends ESTestCase { } } - private static class BrokenWrapper extends FilterDirectoryReader { - - private final boolean hideDelegate; - - BrokenWrapper(DirectoryReader in, boolean hideDelegate) throws IOException { - super(in, new SubReaderWrapper() { - @Override - public LeafReader 
wrap(LeafReader reader) { - return reader; - } - }); - this.hideDelegate = hideDelegate; - } - - @Override - protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { - return new BrokenWrapper(in, hideDelegate); - } - - @Override - public DirectoryReader getDelegate() { - if (hideDelegate) { - try { - return ElasticsearchDirectoryReader.wrap(super.getDelegate(), new ShardId("foo", "_na_", 1)); - } catch (IOException e) { - throw new RuntimeException(e); - } - } - return super.getDelegate(); - } - - @Override - public Object getCoreCacheKey() { - if (hideDelegate == false) { - return super.getCoreCacheKey(); - } else { - return in.getCoreCacheKey(); - } - } - } } diff --git a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java index dfc24d73c97..1a164154df2 100644 --- a/core/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/core/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -865,7 +865,7 @@ public class StoreTests extends ESTestCase { String translogId = "a translog id"; commitData.put(Engine.SYNC_COMMIT_ID, syncId); commitData.put(Translog.TRANSLOG_GENERATION_KEY, translogId); - writer.setCommitData(commitData); + writer.setLiveCommitData(commitData.entrySet()); writer.commit(); writer.close(); Store.MetadataSnapshot metadata; diff --git a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java index cd94ee0f8e9..10f098787c0 100644 --- a/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java +++ b/core/src/test/java/org/elasticsearch/indices/IndicesQueryCacheTests.java @@ -69,9 +69,9 @@ public class IndicesQueryCacheTests extends ESTestCase { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) 
throws IOException { - return new ConstantScoreWeight(this) { + return new ConstantScoreWeight(this, boost) { @Override public Scorer scorer(LeafReaderContext context) throws IOException { return new ConstantScoreScorer(this, score(), DocIdSetIterator.all(context.reader().maxDoc())); diff --git a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java index 2f336d8d69c..6dd9126a519 100644 --- a/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java +++ b/core/src/test/java/org/elasticsearch/indices/memory/breaker/RandomExceptionCircuitBreakerIT.java @@ -41,7 +41,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.search.basic.SearchWithRandomExceptionsIT; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.engine.MockEngineSupport; @@ -298,5 +297,10 @@ public class RandomExceptionCircuitBreakerIT extends ESIntegTestCase { protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { return new RandomExceptionDirectoryReaderWrapper(in, settings); } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } } } diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java index 5caba0fb441..1a357c55eb0 100644 --- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java +++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java @@ -20,21 +20,28 @@ package org.elasticsearch.search; import 
com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DocValues; import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedDocValues; import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BitSetIterator; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.fielddata.AbstractBinaryDocValues; +import org.elasticsearch.index.fielddata.AbstractNumericDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; import org.elasticsearch.index.fielddata.FieldData; import org.elasticsearch.index.fielddata.NumericDoubleValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import org.elasticsearch.search.MultiValueMode.UnsortedNumericDoubleValues; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -44,6 +51,11 @@ import static org.hamcrest.Matchers.equalTo; public class MultiValueModeTests extends ESTestCase { + @FunctionalInterface + private interface Supplier { + T get() throws IOException; + } + private static FixedBitSet randomRootDocs(int maxDoc) { FixedBitSet set = new FixedBitSet(maxDoc); for (int i = 0; i < maxDoc; ++i) { @@ -80,17 +92,27 @@ public class MultiValueModeTests extends ESTestCase { docsWithValue.set(i); } } - final NumericDocValues singleValues = new NumericDocValues() { + + final Supplier multiValues = () -> DocValues.singleton(new AbstractNumericDocValues() { + int 
docId = -1; @Override - public long get(int docID) { - return array[docID]; + public boolean advanceExact(int target) throws IOException { + this.docId = target; + return docsWithValue == null ? true : docsWithValue.get(docId); } - }; - final SortedNumericDocValues multiValues = DocValues.singleton(singleValues, docsWithValue); - verify(multiValues, numDocs); + @Override + public int docID() { + return docId; + } + @Override + public long longValue() { + return array[docId]; + } + }); + verifySortedNumeric(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs); } public void testMultiValuedLongs() throws Exception { @@ -104,42 +126,46 @@ public class MultiValueModeTests extends ESTestCase { Arrays.sort(values); array[i] = values; } - final SortedNumericDocValues multiValues = new SortedNumericDocValues() { + final Supplier multiValues = () -> new AbstractSortedNumericDocValues() { int doc; + int i; @Override - public long valueAt(int index) { - return array[doc][index]; + public long nextValue() { + return array[doc][i++]; } @Override - public void setDocument(int doc) { + public boolean advanceExact(int doc) { this.doc = doc; + i = 0; + return array[doc].length > 0; } @Override - public int count() { + public int docValueCount() { return array[doc].length; } }; - verify(multiValues, numDocs); + verifySortedNumeric(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumeric(multiValues, numDocs, rootDocs, innerDocs); } - private void verify(SortedNumericDocValues values, int maxDoc) { + private void verifySortedNumeric(Supplier supplier, int maxDoc) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) 
{ for (MultiValueMode mode : MultiValueMode.values()) { + SortedNumericDocValues values = supplier.get(); final NumericDocValues selected = mode.select(values, missingValue); for (int i = 0; i < maxDoc; ++i) { - final long actual = selected.get(i); + assertTrue(selected.advanceExact(i)); + final long actual = selected.longValue(); long expected = 0; - values.setDocument(i); - int numValues = values.count(); - if (numValues == 0) { + if (values.advanceExact(i) == false) { expected = missingValue; } else { + int numValues = values.docValueCount(); if (mode == MultiValueMode.MAX) { expected = Long.MIN_VALUE; } else if (mode == MultiValueMode.MIN) { @@ -147,11 +173,11 @@ public class MultiValueModeTests extends ESTestCase { } for (int j = 0; j < numValues; ++j) { if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { - expected += values.valueAt(j); + expected += values.nextValue(); } else if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, values.valueAt(j)); + expected = Math.min(expected, values.nextValue()); } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, values.valueAt(j)); + expected = Math.max(expected, values.nextValue()); } } if (mode == MultiValueMode.AVG) { @@ -159,9 +185,15 @@ public class MultiValueModeTests extends ESTestCase { } else if (mode == MultiValueMode.MEDIAN) { int value = numValues/2; if (numValues % 2 == 0) { - expected = Math.round((values.valueAt(value - 1) + values.valueAt(value))/2.0); + for (int j = 0; j < value - 1; ++j) { + values.nextValue(); + } + expected = Math.round(((double) values.nextValue() + values.nextValue())/2.0); } else { - expected = values.valueAt(value); + for (int j = 0; j < value; ++j) { + values.nextValue(); + } + expected = values.nextValue(); } } } @@ -172,13 +204,15 @@ public class MultiValueModeTests extends ESTestCase { } } - private void verify(SortedNumericDocValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private 
void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { + SortedNumericDocValues values = supplier.get(); final NumericDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { - final long actual = selected.get(root); + assertTrue(selected.advanceExact(root)); + final long actual = selected.longValue(); long expected = 0; if (mode == MultiValueMode.MAX) { expected = Long.MIN_VALUE; @@ -187,16 +221,17 @@ public class MultiValueModeTests extends ESTestCase { } int numValues = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { - values.setDocument(child); - for (int j = 0; j < values.count(); ++j) { - if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { - expected += values.valueAt(j); - } else if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, values.valueAt(j)); - } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, values.valueAt(j)); + if (values.advanceExact(child)) { + for (int j = 0; j < values.docValueCount(); ++j) { + if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { + expected += values.nextValue(); + } else if (mode == MultiValueMode.MIN) { + expected = Math.min(expected, values.nextValue()); + } else if (mode == MultiValueMode.MAX) { + expected = Math.max(expected, values.nextValue()); + } + ++numValues; } - ++numValues; } } if (numValues == 0) { @@ -227,17 +262,22 @@ public class MultiValueModeTests extends ESTestCase { docsWithValue.set(i); } } - final NumericDoubleValues 
singleValues = new NumericDoubleValues() { + final Supplier multiValues = () -> FieldData.singleton(new NumericDoubleValues() { + int docID; @Override - public double get(int docID) { + public boolean advanceExact(int doc) throws IOException { + docID = doc; + return docsWithValue == null || docsWithValue.get(doc); + } + @Override + public double doubleValue() { return array[docID]; } - }; - final SortedNumericDoubleValues multiValues = FieldData.singleton(singleValues, docsWithValue); - verify(multiValues, numDocs); + }); + verifySortedNumericDouble(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs); } public void testMultiValuedDoubles() throws Exception { @@ -251,45 +291,49 @@ public class MultiValueModeTests extends ESTestCase { Arrays.sort(values); array[i] = values; } - final SortedNumericDoubleValues multiValues = new SortedNumericDoubleValues() { + final Supplier multiValues = () -> new SortedNumericDoubleValues() { int doc; + int i; @Override - public double valueAt(int index) { - return array[doc][index]; + public double nextValue() { + return array[doc][i++]; } @Override - public void setDocument(int doc) { + public boolean advanceExact(int doc) { this.doc = doc; + i = 0; + return array[doc].length > 0; } @Override - public int count() { + public int docValueCount() { return array[doc].length; } }; - verify(multiValues, numDocs); + verifySortedNumericDouble(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedNumericDouble(multiValues, numDocs, rootDocs, innerDocs); } - private void verify(SortedNumericDoubleValues values, int maxDoc) { + private void verifySortedNumericDouble(Supplier supplier, int 
maxDoc) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : MultiValueMode.values()) { if (MultiValueMode.MEDIAN.equals(mode)) { continue; } + SortedNumericDoubleValues values = supplier.get(); final NumericDoubleValues selected = mode.select(values, missingValue); for (int i = 0; i < maxDoc; ++i) { - final double actual = selected.get(i); + assertTrue(selected.advanceExact(i)); + final double actual = selected.doubleValue(); double expected = 0.0; - values.setDocument(i); - int numValues = values.count(); - if (numValues == 0) { + if (values.advanceExact(i) == false) { expected = missingValue; } else { + int numValues = values.docValueCount(); if (mode == MultiValueMode.MAX) { expected = Long.MIN_VALUE; } else if (mode == MultiValueMode.MIN) { @@ -297,11 +341,11 @@ public class MultiValueModeTests extends ESTestCase { } for (int j = 0; j < numValues; ++j) { if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { - expected += values.valueAt(j); + expected += values.nextValue(); } else if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, values.valueAt(j)); + expected = Math.min(expected, values.nextValue()); } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, values.valueAt(j)); + expected = Math.max(expected, values.nextValue()); } } if (mode == MultiValueMode.AVG) { @@ -309,9 +353,15 @@ public class MultiValueModeTests extends ESTestCase { } else if (mode == MultiValueMode.MEDIAN) { int value = numValues/2; if (numValues % 2 == 0) { - expected = (values.valueAt(value - 1) + values.valueAt(value))/2.0; + for (int j = 0; j < value - 1; ++j) { + values.nextValue(); + } + expected = (values.nextValue() + values.nextValue())/2.0; } else { - expected = values.valueAt(value); + for (int j = 0; j < value; ++j) { + values.nextValue(); + } + expected = values.nextValue(); } } } @@ -322,13 +372,15 @@ public class MultiValueModeTests extends ESTestCase { } } - private void 
verify(SortedNumericDoubleValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (long missingValue : new long[] { 0, randomLong() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { + SortedNumericDoubleValues values = supplier.get(); final NumericDoubleValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { - final double actual = selected.get(root); + assertTrue(selected.advanceExact(root)); + final double actual = selected.doubleValue();; double expected = 0.0; if (mode == MultiValueMode.MAX) { expected = Long.MIN_VALUE; @@ -337,16 +389,17 @@ public class MultiValueModeTests extends ESTestCase { } int numValues = 0; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { - values.setDocument(child); - for (int j = 0; j < values.count(); ++j) { - if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { - expected += values.valueAt(j); - } else if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, values.valueAt(j)); - } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, values.valueAt(j)); + if (values.advanceExact(child)) { + for (int j = 0; j < values.docValueCount(); ++j) { + if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { + expected += values.nextValue(); + } else if (mode == MultiValueMode.MIN) { + expected = Math.min(expected, values.nextValue()); + } else if (mode == MultiValueMode.MAX) { + expected = Math.max(expected, values.nextValue()); + } + ++numValues; } - ++numValues; } } if 
(numValues == 0) { @@ -380,17 +433,22 @@ public class MultiValueModeTests extends ESTestCase { } } } - final BinaryDocValues singleValues = new BinaryDocValues() { + final Supplier multiValues = () -> FieldData.singleton(new AbstractBinaryDocValues() { + int docID; @Override - public BytesRef get(int docID) { + public boolean advanceExact(int target) throws IOException { + docID = target; + return docsWithValue == null || docsWithValue.get(docID); + } + @Override + public BytesRef binaryValue() { return BytesRef.deepCopyOf(array[docID]); } - }; - final SortedBinaryDocValues multiValues = FieldData.singleton(singleValues, docsWithValue); - verify(multiValues, numDocs); + }); + verifySortedBinary(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedBinary(multiValues, numDocs, rootDocs, innerDocs); } public void testMultiValuedStrings() throws Exception { @@ -404,50 +462,55 @@ public class MultiValueModeTests extends ESTestCase { Arrays.sort(values); array[i] = values; } - final SortedBinaryDocValues multiValues = new SortedBinaryDocValues() { + final Supplier multiValues = () -> new SortedBinaryDocValues() { int doc; + int i; @Override - public BytesRef valueAt(int index) { - return BytesRef.deepCopyOf(array[doc][index]); + public BytesRef nextValue() { + return BytesRef.deepCopyOf(array[doc][i++]); } @Override - public void setDocument(int doc) { + public boolean advanceExact(int doc) { this.doc = doc; + i = 0; + return array[doc].length > 0; } @Override - public int count() { + public int docValueCount() { return array[doc].length; } }; - verify(multiValues, numDocs); + verifySortedBinary(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedBinary(multiValues, numDocs, 
rootDocs, innerDocs); } - private void verify(SortedBinaryDocValues values, int maxDoc) { + private void verifySortedBinary(Supplier supplier, int maxDoc) throws IOException { for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { + SortedBinaryDocValues values = supplier.get(); final BinaryDocValues selected = mode.select(values, missingValue); for (int i = 0; i < maxDoc; ++i) { - final BytesRef actual = selected.get(i); + assertTrue(selected.advanceExact(i)); + final BytesRef actual = selected.binaryValue(); BytesRef expected = null; - values.setDocument(i); - int numValues = values.count(); - if (numValues == 0) { + if (values.advanceExact(i) == false) { expected = missingValue; } else { + int numValues = values.docValueCount(); for (int j = 0; j < numValues; ++j) { if (expected == null) { - expected = BytesRef.deepCopyOf(values.valueAt(j)); + expected = BytesRef.deepCopyOf(values.nextValue()); } else { + BytesRef value = values.nextValue(); if (mode == MultiValueMode.MIN) { - expected = expected.compareTo(values.valueAt(j)) <= 0 ? expected : BytesRef.deepCopyOf(values.valueAt(j)); + expected = expected.compareTo(value) <= 0 ? expected : BytesRef.deepCopyOf(value); } else if (mode == MultiValueMode.MAX) { - expected = expected.compareTo(values.valueAt(j)) > 0 ? expected : BytesRef.deepCopyOf(values.valueAt(j)); + expected = expected.compareTo(value) > 0 ? 
expected : BytesRef.deepCopyOf(value); } } } @@ -462,24 +525,28 @@ public class MultiValueModeTests extends ESTestCase { } } - private void verify(SortedBinaryDocValues values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { + SortedBinaryDocValues values = supplier.get(); final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { - final BytesRef actual = selected.get(root); + assertTrue(selected.advanceExact(root)); + final BytesRef actual = selected.binaryValue(); BytesRef expected = null; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { - values.setDocument(child); - for (int j = 0; j < values.count(); ++j) { - if (expected == null) { - expected = BytesRef.deepCopyOf(values.valueAt(j)); - } else { - if (mode == MultiValueMode.MIN) { - expected = expected.compareTo(values.valueAt(j)) <= 0 ? expected : BytesRef.deepCopyOf(values.valueAt(j)); - } else if (mode == MultiValueMode.MAX) { - expected = expected.compareTo(values.valueAt(j)) > 0 ? expected : BytesRef.deepCopyOf(values.valueAt(j)); + if (values.advanceExact(child)) { + for (int j = 0; j < values.docValueCount(); ++j) { + if (expected == null) { + expected = BytesRef.deepCopyOf(values.nextValue()); + } else { + BytesRef value = values.nextValue(); + if (mode == MultiValueMode.MIN) { + expected = expected.compareTo(value) <= 0 ? 
expected : BytesRef.deepCopyOf(value); + } else if (mode == MultiValueMode.MAX) { + expected = expected.compareTo(value) > 0 ? expected : BytesRef.deepCopyOf(value); + } } } } @@ -507,9 +574,21 @@ public class MultiValueModeTests extends ESTestCase { array[i] = -1; } } - final SortedDocValues singleValues = new SortedDocValues() { + final Supplier multiValues = () -> DocValues.singleton(new AbstractSortedDocValues() { + private int docID = -1; @Override - public int getOrd(int docID) { + public boolean advanceExact(int target) throws IOException { + docID = target; + return array[docID] != -1; + } + + @Override + public int docID() { + return docID; + } + + @Override + public int ordValue() { return array[docID]; } @@ -522,12 +601,11 @@ public class MultiValueModeTests extends ESTestCase { public int getValueCount() { return 1 << 20; } - }; - final RandomAccessOrds multiValues = (RandomAccessOrds) DocValues.singleton(singleValues); - verify(multiValues, numDocs); + }); + verifySortedSet(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs); } public void testMultiValuedOrds() throws Exception { @@ -540,27 +618,24 @@ public class MultiValueModeTests extends ESTestCase { } array[i] = values; } - final RandomAccessOrds multiValues = new RandomAccessOrds() { + final Supplier multiValues = () -> new AbstractSortedSetDocValues() { int doc; - - @Override - public long ordAt(int index) { - return array[doc][index]; - } - - @Override - public int cardinality() { - return array[doc].length; - } + int i; @Override public long nextOrd() { - throw new UnsupportedOperationException(); + if (i < array[doc].length) { + return array[doc][i++]; + } else { + return NO_MORE_ORDS; + } } @Override - public void setDocument(int docID) { + public boolean advanceExact(int docID) { this.doc = docID; 
+ i = 0; + return array[doc].length > 0; } @Override @@ -573,27 +648,29 @@ public class MultiValueModeTests extends ESTestCase { return 1 << 20; } }; - verify(multiValues, numDocs); + verifySortedSet(multiValues, numDocs); final FixedBitSet rootDocs = randomRootDocs(numDocs); final FixedBitSet innerDocs = randomInnerDocs(rootDocs); - verify(multiValues, numDocs, rootDocs, innerDocs); + verifySortedSet(multiValues, numDocs, rootDocs, innerDocs); } - private void verify(RandomAccessOrds values, int maxDoc) { + private void verifySortedSet(Supplier supplier, int maxDoc) throws IOException { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { + SortedSetDocValues values = supplier.get(); final SortedDocValues selected = mode.select(values); for (int i = 0; i < maxDoc; ++i) { - final long actual = selected.getOrd(i); + final long actual = selected.advanceExact(i) ? selected.ordValue() : -1; int expected = -1; - values.setDocument(i); - for (int j = 0; j < values.cardinality(); ++j) { - if (expected == -1) { - expected = (int) values.ordAt(j); - } else { - if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, (int)values.ordAt(j)); - } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, (int)values.ordAt(j)); + if (values.advanceExact(i)) { + for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + if (expected == -1) { + expected = (int) ord; + } else { + if (mode == MultiValueMode.MIN) { + expected = Math.min(expected, (int) ord); + } else if (mode == MultiValueMode.MAX) { + expected = Math.max(expected, (int) ord); + } } } } @@ -603,23 +680,25 @@ public class MultiValueModeTests extends ESTestCase { } } - private void verify(RandomAccessOrds values, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { + private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException { 
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) { + SortedSetDocValues values = supplier.get(); final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L)); int prevRoot = -1; for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) { - final int actual = selected.getOrd(root); + final int actual = selected.advanceExact(root) ? selected.ordValue() : -1; int expected = -1; for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) { - values.setDocument(child); - for (int j = 0; j < values.cardinality(); ++j) { - if (expected == -1) { - expected = (int) values.ordAt(j); - } else { - if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, (int)values.ordAt(j)); - } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, (int)values.ordAt(j)); + if (values.advanceExact(child)) { + for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) { + if (expected == -1) { + expected = (int) ord; + } else { + if (mode == MultiValueMode.MIN) { + expected = Math.min(expected, (int) ord); + } else if (mode == MultiValueMode.MAX) { + expected = Math.max(expected, (int) ord); + } } } } @@ -647,30 +726,36 @@ public class MultiValueModeTests extends ESTestCase { } } final NumericDoubleValues singleValues = new NumericDoubleValues() { + private int docID; @Override - public double get(int docID) { + public boolean advanceExact(int doc) throws IOException { + docID = doc; + return docsWithValue == null || docsWithValue.get(docID); + } + @Override + public double doubleValue() { return array[docID]; } }; - final SortedNumericDoubleValues singletonValues = FieldData.singleton(singleValues, docsWithValue); + final SortedNumericDoubleValues singletonValues = FieldData.singleton(singleValues); final 
MultiValueMode.UnsortedNumericDoubleValues multiValues = new MultiValueMode.UnsortedNumericDoubleValues() { @Override - public int count() { - return singletonValues.count(); + public int docValueCount() { + return singletonValues.docValueCount(); } @Override - public void setDocument(int doc) { - singletonValues.setDocument(doc); + public boolean advanceExact(int doc) throws IOException { + return singletonValues.advanceExact(doc); } @Override - public double valueAt(int index) { - return Math.cos(singletonValues.valueAt(index)); + public double nextValue() throws IOException { + return Math.cos(singletonValues.nextValue()); } }; - verify(multiValues, numDocs); + verifyUnsortedNumeric(() -> multiValues, numDocs); } public void testUnsortedMultiValuedDoubles() throws Exception { @@ -686,37 +771,41 @@ public class MultiValueModeTests extends ESTestCase { } final MultiValueMode.UnsortedNumericDoubleValues multiValues = new MultiValueMode.UnsortedNumericDoubleValues() { int doc; + int i; @Override - public int count() { + public int docValueCount() { return array[doc].length; } @Override - public void setDocument(int doc) { + public boolean advanceExact(int doc) { this.doc = doc; + i = 0; + return array[doc].length > 0; } @Override - public double valueAt(int index) { - return Math.sin(array[doc][index]); + public double nextValue() { + return Math.sin(array[doc][i++]); } }; - verify(multiValues, numDocs); + verifyUnsortedNumeric(() -> multiValues, numDocs); } - private void verify(MultiValueMode.UnsortedNumericDoubleValues values, int maxDoc) { + private void verifyUnsortedNumeric(Supplier supplier, int maxDoc) throws IOException { for (double missingValue : new double[] { 0, randomDouble() }) { for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) { + UnsortedNumericDoubleValues values = supplier.get(); final NumericDoubleValues selected = mode.select(values, missingValue); for (int i = 0; i 
< maxDoc; ++i) { - final double actual = selected.get(i); + assertTrue(selected.advanceExact(i)); + final double actual = selected.doubleValue(); double expected = 0.0; - values.setDocument(i); - int numValues = values.count(); - if (numValues == 0) { + if (values.advanceExact(i) == false) { expected = missingValue; } else { + int numValues = values.docValueCount(); if (mode == MultiValueMode.MAX) { expected = Long.MIN_VALUE; } else if (mode == MultiValueMode.MIN) { @@ -724,11 +813,11 @@ public class MultiValueModeTests extends ESTestCase { } for (int j = 0; j < numValues; ++j) { if (mode == MultiValueMode.SUM || mode == MultiValueMode.AVG) { - expected += values.valueAt(j); + expected += values.nextValue(); } else if (mode == MultiValueMode.MIN) { - expected = Math.min(expected, values.valueAt(j)); + expected = Math.min(expected, values.nextValue()); } else if (mode == MultiValueMode.MAX) { - expected = Math.max(expected, values.valueAt(j)); + expected = Math.max(expected, values.nextValue()); } } if (mode == MultiValueMode.AVG) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 9ebfb623b64..3dc530204bc 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -236,7 +236,7 @@ public abstract class AggregatorTestCase extends ESTestCase { List aggs = new ArrayList<> (); Query rewritten = searcher.rewrite(query); - Weight weight = searcher.createWeight(rewritten, true); + Weight weight = searcher.createWeight(rewritten, true, 1f); C root = createAggregator(builder, searcher, fieldTypes); try { for (ShardSearcher subSearcher : subSearchers) { diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java index 6ba6da3b555..6ed2c1a3a8c 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/range/BinaryRangeAggregatorTests.java @@ -23,9 +23,9 @@ import java.util.Arrays; import java.util.HashSet; import java.util.Set; -import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.bucket.range.BinaryRangeAggregator.SortedBinaryRangeLeafCollector; @@ -36,7 +36,7 @@ import com.carrotsearch.hppc.LongHashSet; public class BinaryRangeAggregatorTests extends ESTestCase { - private static class FakeSortedSetDocValues extends SortedSetDocValues { + private static class FakeSortedSetDocValues extends AbstractSortedSetDocValues { private final BytesRef[] terms; long[] ords; @@ -47,8 +47,9 @@ public class BinaryRangeAggregatorTests extends ESTestCase { } @Override - public void setDocument(int docID) { + public boolean advanceExact(int docID) { i = 0; + return true; } @Override @@ -145,6 +146,7 @@ public class BinaryRangeAggregatorTests extends ESTestCase { private static class FakeSortedBinaryDocValues extends SortedBinaryDocValues { private final BytesRef[] terms; + int i; long[] ords; FakeSortedBinaryDocValues(BytesRef[] terms) { @@ -152,18 +154,19 @@ public class BinaryRangeAggregatorTests extends ESTestCase { } @Override - public void setDocument(int docID) { - // no-op + public boolean advanceExact(int docID) { + i = 0; + return true; } @Override - public int count() { + public int docValueCount() { return ords.length; } @Override - 
public BytesRef valueAt(int index) { - return terms[(int) ords[index]]; + public BytesRef nextValue() { + return terms[(int) ords[i++]]; } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index 42f0df4beb9..c211f754d19 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.support; import org.apache.lucene.index.DocValues; -import org.apache.lucene.index.RandomAccessOrds; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.LongBitSet; @@ -30,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; import org.elasticsearch.index.query.QueryParseContext; import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.bucket.terms.support.IncludeExclude; @@ -58,13 +58,14 @@ public class IncludeExcludeTests extends ESTestCase { } public void testSingleTermWithOrds() throws IOException { - RandomAccessOrds ords = new RandomAccessOrds() { + SortedSetDocValues ords = new AbstractSortedSetDocValues() { boolean consumed = true; @Override - public void setDocument(int docID) { + public boolean advanceExact(int docID) { consumed = false; + return true; } @Override @@ -88,15 +89,6 @@ public class IncludeExcludeTests extends ESTestCase { return 1; } - @Override - public long ordAt(int index) { - return 0; - } - - @Override - public int cardinality() { - return 1; - } }; IncludeExclude 
inexcl = new IncludeExclude( new TreeSet<>(Collections.singleton(new BytesRef("foo"))), diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java index 2c819a671e5..568b8e7996f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/MissingValuesTests.java @@ -21,24 +21,27 @@ package org.elasticsearch.search.aggregations.support; import com.carrotsearch.randomizedtesting.generators.RandomPicks; import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import org.apache.lucene.index.RandomAccessOrds; + import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.TestUtil; import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.index.fielddata.AbstractRandomAccessOrds; +import org.elasticsearch.index.fielddata.AbstractSortedNumericDocValues; +import org.elasticsearch.index.fielddata.AbstractSortedSetDocValues; import org.elasticsearch.index.fielddata.MultiGeoPointValues; import org.elasticsearch.index.fielddata.SortedBinaryDocValues; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.util.Arrays; import java.util.HashSet; import java.util.Set; public class MissingValuesTests extends ESTestCase { - public void testMissingBytes() { + public void testMissingBytes() throws IOException { final int numDocs = TestUtil.nextInt(random(), 1, 100); final BytesRef[][] values = new BytesRef[numDocs][]; for (int i = 0; i < numDocs; ++i) { @@ -50,40 +53,43 @@ public class MissingValuesTests extends ESTestCase { } SortedBinaryDocValues asBinaryValues = new SortedBinaryDocValues() { - int i = -1; + 
int doc = -1; + int i; @Override - public BytesRef valueAt(int index) { - return values[i][index]; + public BytesRef nextValue() { + return values[doc][i++]; } @Override - public void setDocument(int docId) { - i = docId; + public boolean advanceExact(int docId) { + doc = docId; + i = 0; + return values[doc].length > 0; } @Override - public int count() { - return values[i].length; + public int docValueCount() { + return values[doc].length; } }; final BytesRef missing = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 2)); SortedBinaryDocValues withMissingReplaced = MissingValues.replaceMissing(asBinaryValues, missing); for (int i = 0; i < numDocs; ++i) { - withMissingReplaced.setDocument(i); + assertTrue(withMissingReplaced.advanceExact(i)); if (values[i].length > 0) { - assertEquals(values[i].length, withMissingReplaced.count()); + assertEquals(values[i].length, withMissingReplaced.docValueCount()); for (int j = 0; j < values[i].length; ++j) { - assertEquals(values[i][j], withMissingReplaced.valueAt(j)); + assertEquals(values[i][j], withMissingReplaced.nextValue()); } } else { - assertEquals(1, withMissingReplaced.count()); - assertEquals(missing, withMissingReplaced.valueAt(0)); + assertEquals(1, withMissingReplaced.docValueCount()); + assertEquals(missing, withMissingReplaced.nextValue()); } } } - public void testMissingOrds() { + public void testMissingOrds() throws IOException { final int numDocs = TestUtil.nextInt(random(), 1, 100); final int numOrds = TestUtil.nextInt(random(), 1, 10); @@ -105,13 +111,16 @@ public class MissingValuesTests extends ESTestCase { ords[i][j] = TestUtil.nextInt(random(), ords[i][j], maxOrd - 1); } } - RandomAccessOrds asRandomAccessOrds = new AbstractRandomAccessOrds() { + SortedSetDocValues asRandomAccessOrds = new AbstractSortedSetDocValues() { - int i = -1; + int doc = -1; + int i; @Override - public void doSetDocument(int docID) { - i = docID; + public boolean advanceExact(int docID) { + doc = docID; + i = 0; + return 
ords[doc].length > 0; } @Override @@ -125,13 +134,12 @@ public class MissingValuesTests extends ESTestCase { } @Override - public long ordAt(int index) { - return ords[i][index]; - } - - @Override - public int cardinality() { - return ords[i].length; + public long nextOrd() { + if (i < ords[doc].length) { + return ords[doc][i++]; + } else { + return NO_MORE_ORDS; + } } }; @@ -139,28 +147,29 @@ public class MissingValuesTests extends ESTestCase { final BytesRef missingMissing = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 5)); for (BytesRef missing : Arrays.asList(existingMissing, missingMissing)) { - RandomAccessOrds withMissingReplaced = MissingValues.replaceMissing(asRandomAccessOrds, missing); + SortedSetDocValues withMissingReplaced = MissingValues.replaceMissing(asRandomAccessOrds, missing); if (valueSet.contains(missing)) { assertEquals(values.length, withMissingReplaced.getValueCount()); } else { assertEquals(values.length + 1, withMissingReplaced.getValueCount()); } for (int i = 0; i < numDocs; ++i) { - withMissingReplaced.setDocument(i); + assertTrue(withMissingReplaced.advanceExact(i)); if (ords[i].length > 0) { - assertEquals(ords[i].length, withMissingReplaced.cardinality()); - for (int j = 0; j < ords[i].length; ++j) { - assertEquals(values[ords[i][j]], withMissingReplaced.lookupOrd(withMissingReplaced.ordAt(j))); + for (int ord : ords[i]) { + assertEquals(values[ord], + withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); } + assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } else { - assertEquals(1, withMissingReplaced.cardinality()); - assertEquals(missing, withMissingReplaced.lookupOrd(withMissingReplaced.ordAt(0))); + assertEquals(missing, withMissingReplaced.lookupOrd(withMissingReplaced.nextOrd())); + assertEquals(SortedSetDocValues.NO_MORE_ORDS, withMissingReplaced.nextOrd()); } } } } - public void testMissingLongs() { + public void testMissingLongs() throws IOException { final int numDocs = 
TestUtil.nextInt(random(), 1, 100); final int[][] values = new int[numDocs][]; for (int i = 0; i < numDocs; ++i) { @@ -170,42 +179,45 @@ public class MissingValuesTests extends ESTestCase { } Arrays.sort(values[i]); } - SortedNumericDocValues asNumericValues = new SortedNumericDocValues() { + SortedNumericDocValues asNumericValues = new AbstractSortedNumericDocValues() { - int i = -1; + int doc = -1; + int i; @Override - public long valueAt(int index) { - return values[i][index]; + public long nextValue() { + return values[doc][i++]; } @Override - public void setDocument(int docId) { - i = docId; + public boolean advanceExact(int docId) { + doc = docId; + i = 0; + return values[doc].length > 0; } @Override - public int count() { - return values[i].length; + public int docValueCount() { + return values[doc].length; } }; final long missing = randomInt(); SortedNumericDocValues withMissingReplaced = MissingValues.replaceMissing(asNumericValues, missing); for (int i = 0; i < numDocs; ++i) { - withMissingReplaced.setDocument(i); + assertTrue(withMissingReplaced.advanceExact(i)); if (values[i].length > 0) { - assertEquals(values[i].length, withMissingReplaced.count()); + assertEquals(values[i].length, withMissingReplaced.docValueCount()); for (int j = 0; j < values[i].length; ++j) { - assertEquals(values[i][j], withMissingReplaced.valueAt(j)); + assertEquals(values[i][j], withMissingReplaced.nextValue()); } } else { - assertEquals(1, withMissingReplaced.count()); - assertEquals(missing, withMissingReplaced.valueAt(0)); + assertEquals(1, withMissingReplaced.docValueCount()); + assertEquals(missing, withMissingReplaced.nextValue()); } } } - public void testMissingDoubles() { + public void testMissingDoubles() throws IOException { final int numDocs = TestUtil.nextInt(random(), 1, 100); final double[][] values = new double[numDocs][]; for (int i = 0; i < numDocs; ++i) { @@ -217,40 +229,43 @@ public class MissingValuesTests extends ESTestCase { } SortedNumericDoubleValues 
asNumericValues = new SortedNumericDoubleValues() { - int i = -1; + int doc = -1; + int i; @Override - public double valueAt(int index) { - return values[i][index]; + public double nextValue() { + return values[doc][i++]; } @Override - public void setDocument(int docId) { - i = docId; + public boolean advanceExact(int docId) { + doc = docId; + i = 0; + return true; } @Override - public int count() { - return values[i].length; + public int docValueCount() { + return values[doc].length; } }; final long missing = randomInt(); SortedNumericDoubleValues withMissingReplaced = MissingValues.replaceMissing(asNumericValues, missing); for (int i = 0; i < numDocs; ++i) { - withMissingReplaced.setDocument(i); + assertTrue(withMissingReplaced.advanceExact(i)); if (values[i].length > 0) { - assertEquals(values[i].length, withMissingReplaced.count()); + assertEquals(values[i].length, withMissingReplaced.docValueCount()); for (int j = 0; j < values[i].length; ++j) { - assertEquals(values[i][j], withMissingReplaced.valueAt(j), 0); + assertEquals(values[i][j], withMissingReplaced.nextValue(), 0); } } else { - assertEquals(1, withMissingReplaced.count()); - assertEquals(missing, withMissingReplaced.valueAt(0), 0); + assertEquals(1, withMissingReplaced.docValueCount()); + assertEquals(missing, withMissingReplaced.nextValue(), 0); } } } - public void testMissingGeoPoints() { + public void testMissingGeoPoints() throws IOException { final int numDocs = TestUtil.nextInt(random(), 1, 100); final GeoPoint[][] values = new GeoPoint[numDocs][]; for (int i = 0; i < numDocs; ++i) { @@ -261,35 +276,38 @@ public class MissingValuesTests extends ESTestCase { } MultiGeoPointValues asGeoValues = new MultiGeoPointValues() { - int i = -1; + int doc = -1; + int i; @Override - public GeoPoint valueAt(int index) { - return values[i][index]; + public GeoPoint nextValue() { + return values[doc][i++]; } @Override - public void setDocument(int docId) { - i = docId; + public boolean advanceExact(int docId) { 
+ doc = docId; + i = 0; + return values[doc].length > 0; } @Override - public int count() { - return values[i].length; + public int docValueCount() { + return values[doc].length; } }; final GeoPoint missing = new GeoPoint(randomDouble() * 90, randomDouble() * 180); MultiGeoPointValues withMissingReplaced = MissingValues.replaceMissing(asGeoValues, missing); for (int i = 0; i < numDocs; ++i) { - withMissingReplaced.setDocument(i); + assertTrue(withMissingReplaced.advanceExact(i)); if (values[i].length > 0) { - assertEquals(values[i].length, withMissingReplaced.count()); + assertEquals(values[i].length, withMissingReplaced.docValueCount()); for (int j = 0; j < values[i].length; ++j) { - assertEquals(values[i][j], withMissingReplaced.valueAt(j)); + assertEquals(values[i][j], withMissingReplaced.nextValue()); } } else { - assertEquals(1, withMissingReplaced.count()); - assertEquals(missing, withMissingReplaced.valueAt(0)); + assertEquals(1, withMissingReplaced.docValueCount()); + assertEquals(missing, withMissingReplaced.nextValue()); } } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java index f338e961312..11e03a969d0 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/ScriptValuesTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues; import org.elasticsearch.search.aggregations.support.values.ScriptLongValues; import org.elasticsearch.test.ESTestCase; +import java.io.IOException; import java.util.Arrays; import java.util.Map; @@ -87,7 +88,7 @@ public class ScriptValuesTests extends ESTestCase { } - public void testLongs() { + public void testLongs() throws IOException { final Object[][] values = new Long[randomInt(10)][]; for (int i = 0; i < values.length; 
++i) { Long[] longs = new Long[randomInt(8)]; @@ -100,15 +101,17 @@ public class ScriptValuesTests extends ESTestCase { FakeSearchScript script = new FakeSearchScript(values); ScriptLongValues scriptValues = new ScriptLongValues(script); for (int i = 0; i < values.length; ++i) { - scriptValues.setDocument(i); - assertEquals(values[i].length, scriptValues.count()); - for (int j = 0; j < values[i].length; ++j) { - assertEquals(values[i][j], scriptValues.valueAt(j)); + assertEquals(values[i].length > 0, scriptValues.advanceExact(i)); + if (values[i].length > 0) { + assertEquals(values[i].length, scriptValues.docValueCount()); + for (int j = 0; j < values[i].length; ++j) { + assertEquals(values[i][j], scriptValues.nextValue()); + } } } } - public void testBooleans() { + public void testBooleans() throws IOException { final Object[][] values = new Boolean[randomInt(10)][]; for (int i = 0; i < values.length; ++i) { Boolean[] booleans = new Boolean[randomInt(8)]; @@ -121,15 +124,17 @@ public class ScriptValuesTests extends ESTestCase { FakeSearchScript script = new FakeSearchScript(values); ScriptLongValues scriptValues = new ScriptLongValues(script); for (int i = 0; i < values.length; ++i) { - scriptValues.setDocument(i); - assertEquals(values[i].length, scriptValues.count()); - for (int j = 0; j < values[i].length; ++j) { - assertEquals(values[i][j], scriptValues.valueAt(j) == 1L); + assertEquals(values[i].length > 0, scriptValues.advanceExact(i)); + if (values[i].length > 0) { + assertEquals(values[i].length, scriptValues.docValueCount()); + for (int j = 0; j < values[i].length; ++j) { + assertEquals(values[i][j], scriptValues.nextValue() == 1L); + } } } } - public void testDoubles() { + public void testDoubles() throws IOException { final Object[][] values = new Double[randomInt(10)][]; for (int i = 0; i < values.length; ++i) { Double[] doubles = new Double[randomInt(8)]; @@ -142,15 +147,17 @@ public class ScriptValuesTests extends ESTestCase { FakeSearchScript script 
= new FakeSearchScript(values); ScriptDoubleValues scriptValues = new ScriptDoubleValues(script); for (int i = 0; i < values.length; ++i) { - scriptValues.setDocument(i); - assertEquals(values[i].length, scriptValues.count()); - for (int j = 0; j < values[i].length; ++j) { - assertEquals(values[i][j], scriptValues.valueAt(j)); + assertEquals(values[i].length > 0, scriptValues.advanceExact(i)); + if (values[i].length > 0) { + assertEquals(values[i].length, scriptValues.docValueCount()); + for (int j = 0; j < values[i].length; ++j) { + assertEquals(values[i][j], scriptValues.nextValue()); + } } } } - public void testBytes() { + public void testBytes() throws IOException { final String[][] values = new String[randomInt(10)][]; for (int i = 0; i < values.length; ++i) { String[] strings = new String[randomInt(8)]; @@ -163,10 +170,12 @@ public class ScriptValuesTests extends ESTestCase { FakeSearchScript script = new FakeSearchScript(values); ScriptBytesValues scriptValues = new ScriptBytesValues(script); for (int i = 0; i < values.length; ++i) { - scriptValues.setDocument(i); - assertEquals(values[i].length, scriptValues.count()); - for (int j = 0; j < values[i].length; ++j) { - assertEquals(new BytesRef(values[i][j]), scriptValues.valueAt(j)); + assertEquals(values[i].length > 0, scriptValues.advanceExact(i)); + if (values[i].length > 0) { + assertEquals(values[i].length, scriptValues.docValueCount()); + for (int j = 0; j < values[i].length; ++j) { + assertEquals(new BytesRef(values[i][j]), scriptValues.nextValue()); + } } } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java index 5f8c658f234..d0c583257ae 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/support/ValuesSourceConfigTests.java @@ -48,9 
+48,9 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { ValuesSource.Bytes valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedBinaryDocValues values = valuesSource.bytesValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(new BytesRef("abc"), values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(new BytesRef("abc"), values.nextValue()); } } @@ -70,16 +70,15 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { ValuesSource.Bytes valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedBinaryDocValues values = valuesSource.bytesValues(ctx); - values.setDocument(0); - assertEquals(0, values.count()); + assertFalse(values.advanceExact(0)); config = ValuesSourceConfig.resolve( context, null, "bytes", null, "abc", null, null); valuesSource = config.toValuesSource(context); values = valuesSource.bytesValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(new BytesRef("abc"), values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(new BytesRef("abc"), values.nextValue()); } } @@ -102,9 +101,9 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedBinaryDocValues values = valuesSource.bytesValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(new BytesRef("abc"), values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(new BytesRef("abc"), values.nextValue()); } } @@ -124,9 +123,9 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { ValuesSource.Numeric valuesSource = config.toValuesSource(context); 
LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedNumericDocValues values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(42, values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(42, values.nextValue()); } } @@ -146,16 +145,15 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { ValuesSource.Numeric valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedNumericDocValues values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(0, values.count()); + assertFalse(values.advanceExact(0)); config = ValuesSourceConfig.resolve( context, null, "long", null, 42, null, null); valuesSource = config.toValuesSource(context); values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(42, values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(42, values.nextValue()); } } @@ -179,9 +177,9 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedNumericDocValues values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(42, values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(42, values.nextValue()); } } @@ -201,9 +199,9 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { ValuesSource.Numeric valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedNumericDocValues values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(1, values.valueAt(0)); + 
assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(1, values.nextValue()); } } @@ -223,16 +221,15 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { ValuesSource.Numeric valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedNumericDocValues values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(0, values.count()); + assertFalse(values.advanceExact(0)); config = ValuesSourceConfig.resolve( context, null, "bool", null, true, null, null); valuesSource = config.toValuesSource(context); values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(1, values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(1, values.nextValue()); } } @@ -256,9 +253,9 @@ public class ValuesSourceConfigTests extends ESSingleNodeTestCase { valuesSource = config.toValuesSource(context); LeafReaderContext ctx = searcher.reader().leaves().get(0); SortedNumericDocValues values = valuesSource.longValues(ctx); - values.setDocument(0); - assertEquals(1, values.count()); - assertEquals(1, values.valueAt(0)); + assertTrue(values.advanceExact(0)); + assertEquals(1, values.docValueCount()); + assertEquals(1, values.nextValue()); } } } diff --git a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java index 067064dfc15..5b68d3cd1f1 100644 --- a/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java +++ b/core/src/test/java/org/elasticsearch/search/basic/SearchWithRandomExceptionsIT.java @@ -243,6 +243,11 @@ public class SearchWithRandomExceptionsIT extends ESIntegTestCase { protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) throws IOException { return new 
RandomExceptionDirectoryReaderWrapper(in, settings); } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } } diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java index b923c2464d8..18ba716f70c 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/PlainHighlighterTests.java @@ -19,29 +19,12 @@ package org.elasticsearch.search.fetch.subphase.highlight; -import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.MockAnalyzer; -import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.index.Term; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.PhraseQuery; import org.apache.lucene.search.Query; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.highlight.InvalidTokenOffsetsException; import org.apache.lucene.search.highlight.QueryScorer; -import org.apache.lucene.spatial.geopoint.search.GeoPointDistanceQuery; -import org.apache.lucene.spatial.geopoint.search.GeoPointInBBoxQuery; -import org.apache.lucene.spatial.geopoint.search.GeoPointInPolygonQuery; import org.apache.lucene.util.LuceneTestCase; -import org.elasticsearch.index.analysis.FieldNameAnalyzer; -import org.elasticsearch.search.fetch.subphase.highlight.CustomQueryScorer; - -import java.io.IOException; -import java.util.HashMap; -import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; public class PlainHighlighterTests extends LuceneTestCase { @@ -55,40 +38,4 @@ public class PlainHighlighterTests extends LuceneTestCase { String[] frags = highlighter.getBestFragments(new MockAnalyzer(random()), "field", "bar foo bar foo", 
10); assertArrayEquals(new String[] {"bar foo bar foo"}, frags); } - - public void checkGeoQueryHighlighting(Query geoQuery) throws IOException, InvalidTokenOffsetsException { - Map analysers = new HashMap(); - analysers.put("text", new StandardAnalyzer()); - FieldNameAnalyzer fieldNameAnalyzer = new FieldNameAnalyzer(analysers); - Query termQuery = new TermQuery(new Term("text", "failure")); - Query boolQuery = new BooleanQuery.Builder().add(new BooleanClause(geoQuery, BooleanClause.Occur.SHOULD)) - .add(new BooleanClause(termQuery, BooleanClause.Occur.SHOULD)).build(); - org.apache.lucene.search.highlight.Highlighter highlighter = - new org.apache.lucene.search.highlight.Highlighter(new CustomQueryScorer(boolQuery)); - String fragment = highlighter.getBestFragment(fieldNameAnalyzer.tokenStream("text", "Arbitrary text field which should not cause " + - "a failure"), "Arbitrary text field which should not cause a failure"); - assertThat(fragment, equalTo("Arbitrary text field which should not cause a failure")); - Query rewritten = boolQuery.rewrite(null); - highlighter = new org.apache.lucene.search.highlight.Highlighter(new CustomQueryScorer(rewritten)); - fragment = highlighter.getBestFragment(fieldNameAnalyzer.tokenStream("text", "Arbitrary text field which should not cause " + - "a failure"), "Arbitrary text field which should not cause a failure"); - assertThat(fragment, equalTo("Arbitrary text field which should not cause a failure")); - } - - public void testGeoPointInBBoxQueryHighlighting() throws IOException, InvalidTokenOffsetsException { - Query geoQuery = new GeoPointDistanceQuery("geo_point", -64.92354174306496, -170.15625, 5576757); - checkGeoQueryHighlighting(geoQuery); - } - - public void testGeoPointDistanceQueryHighlighting() throws IOException, InvalidTokenOffsetsException { - Query geoQuery = new GeoPointInBBoxQuery("geo_point", -64.92354174306496, 61.10078883158897, -170.15625, 118.47656249999999); - checkGeoQueryHighlighting(geoQuery); - } - 
- public void testGeoPointInPolygonQueryHighlighting() throws IOException, InvalidTokenOffsetsException { - double[] polyLats = new double[]{0, 60, 0, 0}; - double[] polyLons = new double[]{0, 60, 90, 0}; - Query geoQuery = new GeoPointInPolygonQuery("geo_point", polyLats, polyLons); - checkGeoQueryHighlighting(geoQuery); - } } diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java index fc9dfb88449..18db8cd539e 100644 --- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java +++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java @@ -587,12 +587,12 @@ public class QueryRescorerIT extends ESIntegTestCase { String[] intToEnglish = new String[] { English.intToEnglish(i), English.intToEnglish(i + 1), English.intToEnglish(i + 2), English.intToEnglish(i + 3) }; - QueryBuilder query = boolQuery().disableCoord(true) + QueryBuilder query = boolQuery() .should(functionScoreQuery(termQuery("field1", intToEnglish[0]), weightFactorFunction(2.0f)).boostMode(REPLACE)) .should(functionScoreQuery(termQuery("field1", intToEnglish[1]), weightFactorFunction(3.0f)).boostMode(REPLACE)) .should(functionScoreQuery(termQuery("field1", intToEnglish[2]), weightFactorFunction(5.0f)).boostMode(REPLACE)) .should(functionScoreQuery(termQuery("field1", intToEnglish[3]), weightFactorFunction(0.2f)).boostMode(REPLACE)); - QueryRescorerBuilder rescoreQuery = queryRescorer(boolQuery().disableCoord(true) + QueryRescorerBuilder rescoreQuery = queryRescorer(boolQuery() .should(functionScoreQuery(termQuery("field1", intToEnglish[0]), weightFactorFunction(5.0f)).boostMode(REPLACE)) .should(functionScoreQuery(termQuery("field1", intToEnglish[1]), weightFactorFunction(7.0f)).boostMode(REPLACE)) .should(functionScoreQuery(termQuery("field1", intToEnglish[3]), weightFactorFunction(0.0f)).boostMode(REPLACE))); diff --git 
a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java index f22ec392b99..58c0bf82e98 100644 --- a/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java +++ b/core/src/test/java/org/elasticsearch/search/query/SimpleQueryStringIT.java @@ -120,6 +120,7 @@ public class SimpleQueryStringIT extends ESIntegTestCase { assertSearchHits(searchResponse, "5", "6"); } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/23966") public void testSimpleQueryStringMinimumShouldMatch() throws Exception { createIndex("test"); ensureGreen("test"); diff --git a/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc b/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc index f215857bf8c..4756ebdafc8 100644 --- a/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/geocentroid-aggregation.asciidoc @@ -60,8 +60,8 @@ The response for the above aggregation: "aggregations": { "centroid": { "location": { - "lat": 51.009829603135586, - "lon": 3.966213036328554 + "lat": 51.00982963107526, + "lon": 3.9662130922079086 } } } @@ -113,7 +113,7 @@ The response for the above aggregation: "centroid": { "location": { "lat": 52.371655656024814, - "lon": 4.909563269466162 + "lon": 4.909563297405839 } } }, @@ -122,8 +122,8 @@ The response for the above aggregation: "doc_count": 2, "centroid": { "location": { - "lat": 48.86055544484407, - "lon": 2.331694420427084 + "lat": 48.86055548675358, + "lon": 2.3316944623366 } } }, @@ -132,8 +132,8 @@ The response for the above aggregation: "doc_count": 1, "centroid": { "location": { - "lat": 51.222899928689, - "lon": 4.405199903994799 + "lat": 51.22289997059852, + "lon": 4.40519998781383 } } } diff --git a/docs/reference/index-modules/similarity.asciidoc b/docs/reference/index-modules/similarity.asciidoc index 7930ed573b4..61333c41a0f 100644 
--- a/docs/reference/index-modules/similarity.asciidoc +++ b/docs/reference/index-modules/similarity.asciidoc @@ -165,14 +165,10 @@ Type name: `LMJelinekMercer` [float] [[default-base]] -==== Default and Base Similarities +==== Default Similarity By default, Elasticsearch will use whatever similarity is configured as -`default`. However, the similarity functions `queryNorm()` and `coord()` -are not per-field. Consequently, for expert users wanting to change the -implementation used for these two methods, while not changing the -`default`, it is possible to configure a similarity with the name -`base`. This similarity will then be used for the two methods. +`default`. You can change the default similarity for all fields in an index when it is <>: @@ -185,7 +181,7 @@ PUT /my_index "index": { "similarity": { "default": { - "type": "classic" + "type": "boolean" } } } @@ -205,7 +201,7 @@ PUT /my_index/_settings "index": { "similarity": { "default": { - "type": "classic" + "type": "boolean" } } } diff --git a/docs/reference/migration/migrate_6_0/search.asciidoc b/docs/reference/migration/migrate_6_0/search.asciidoc index f94d93b6ad5..7b080eeb3be 100644 --- a/docs/reference/migration/migrate_6_0/search.asciidoc +++ b/docs/reference/migration/migrate_6_0/search.asciidoc @@ -48,6 +48,9 @@ * The `ignore_malformed` and `coerce` parameters have been removed from `geo_bounding_box`, `geo_polygon`, and `geo_distance` queries. +* The `disable_coord` parameter of the `bool` and `common_terms` queries has + been removed. If provided, it will be ignored and issue a deprecation warning. 
+ ==== Search shards API The search shards API no longer accepts the `type` url parameter, which didn't diff --git a/docs/reference/migration/migrate_6_0/settings.asciidoc b/docs/reference/migration/migrate_6_0/settings.asciidoc index 44acb999394..fa010aab1fe 100644 --- a/docs/reference/migration/migrate_6_0/settings.asciidoc +++ b/docs/reference/migration/migrate_6_0/settings.asciidoc @@ -59,4 +59,10 @@ will use the best `store` implementation for your operating system. The blocking TCP client, blocking TCP server, and blocking HTTP server have been removed. As a consequence, the `network.tcp.blocking_server`, `network.tcp.blocking_client`, `network.tcp.blocking`,`transport.tcp.blocking_client`, `transport.tcp.blocking_server`, -and `http.tcp.blocking_server` settings are not recognized anymore. \ No newline at end of file +and `http.tcp.blocking_server` settings are not recognized anymore. + +==== Similarity settings + +The `base` similarity is now ignored as coords and query normalization have +been removed. If provided, this setting will be ignored and issue a +deprecation warning. diff --git a/docs/reference/query-dsl/bool-query.asciidoc b/docs/reference/query-dsl/bool-query.asciidoc index 977953affc5..4d66e5e7f64 100644 --- a/docs/reference/query-dsl/bool-query.asciidoc +++ b/docs/reference/query-dsl/bool-query.asciidoc @@ -37,11 +37,6 @@ If this query is used in a filter context and it has `should` clauses then at least one `should` clause is required to match. ======================================================================== -The bool query also supports `disable_coord` parameter (defaults to -`false`). Basically the coord similarity computes a score factor based -on the fraction of all query terms that a document contains. See Lucene -`BooleanQuery` for more details. 
- The `bool` query takes a _more-matches-is-better_ approach, so the score from each matching `must` or `should` clause will be added together to provide the final `_score` for each document. diff --git a/docs/reference/query-dsl/common-terms-query.asciidoc b/docs/reference/query-dsl/common-terms-query.asciidoc index fcc4ace2ec6..a0c58597f7a 100644 --- a/docs/reference/query-dsl/common-terms-query.asciidoc +++ b/docs/reference/query-dsl/common-terms-query.asciidoc @@ -297,5 +297,5 @@ GET /_search The high frequency generated query is then slightly less restrictive than with an `AND`. -The `common` terms query also supports `boost`, `analyzer` and -`disable_coord` as parameters. +The `common` terms query also supports `boost` and `analyzer` as +parameters. diff --git a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java index 4665e74daa7..5c193828c55 100644 --- a/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java +++ b/modules/aggs-matrix-stats/src/main/java/org/elasticsearch/search/aggregations/matrix/stats/MatrixStatsAggregator.java @@ -100,16 +100,20 @@ public class MatrixStatsAggregator extends MetricsAggregator { /** * return a map of field names and data */ - private boolean includeDocument(int doc) { + private boolean includeDocument(int doc) throws IOException { // loop over fields for (int i = 0; i < fieldVals.length; ++i) { final NumericDoubleValues doubleValues = values[i]; - final double value = doubleValues.get(doc); - // skip if value is missing - if (value == Double.NEGATIVE_INFINITY) { + if (doubleValues.advanceExact(doc)) { + final double value = doubleValues.doubleValue(); + if (value == Double.NEGATIVE_INFINITY) { + // TODO: Fix matrix stats to treat neg inf as any other value + return false; + } + 
fieldVals[i] = value; + } else { return false; } - fieldVals[i] = value; } return true; } diff --git a/modules/lang-expression/licenses/lucene-expressions-6.5.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-6.5.0.jar.sha1 deleted file mode 100644 index 10514bb3d1f..00000000000 --- a/modules/lang-expression/licenses/lucene-expressions-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5dfd44932fc77187a233a1cbf228c1a96ac8924f \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.0.0-snapshot-89f6d17.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..c47ca7d206c --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +52bc12bbc30db614056896495f30699d69eabae4 \ No newline at end of file diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java index 1aa85ef685b..6a36ff7a539 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/CountMethodValueSource.java @@ -50,11 +50,14 @@ final class CountMethodValueSource extends ValueSource { final SortedNumericDoubleValues values = leafData.getDoubleValues(); return new DoubleDocValues(this) { - @Override - public double doubleVal(int doc) { - values.setDocument(doc); - return values.count(); - } + @Override + public double doubleVal(int doc) throws IOException { + if (values.advanceExact(doc)) { + return values.docValueCount(); + } else { + return 0; + } + } }; } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java 
b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java index 98909f4401a..b0bc7c203b6 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateMethodValueSource.java @@ -56,12 +56,16 @@ class DateMethodValueSource extends FieldDataValueSource { final Calendar calendar = Calendar.getInstance(TimeZone.getTimeZone("UTC"), Locale.ROOT); NumericDoubleValues docValues = multiValueMode.select(leafData.getDoubleValues(), 0d); return new DoubleDocValues(this) { - @Override - public double doubleVal(int docId) { - long millis = (long)docValues.get(docId); - calendar.setTimeInMillis(millis); - return calendar.get(calendarType); - } + @Override + public double doubleVal(int docId) throws IOException { + if (docValues.advanceExact(docId)) { + long millis = (long)docValues.doubleValue(); + calendar.setTimeInMillis(millis); + return calendar.get(calendarType); + } else { + return 0; + } + } }; } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateObjectValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateObjectValueSource.java index a9f2018d398..86abcbcbefa 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateObjectValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/DateObjectValueSource.java @@ -58,12 +58,16 @@ class DateObjectValueSource extends FieldDataValueSource { MutableDateTime joda = new MutableDateTime(0, DateTimeZone.UTC); NumericDoubleValues docValues = multiValueMode.select(leafData.getDoubleValues(), 0d); return new DoubleDocValues(this) { - @Override - public double doubleVal(int docId) { - long millis = (long)docValues.get(docId); - joda.setMillis(millis); - return function.applyAsInt(joda); - } + @Override + public 
double doubleVal(int docId) throws IOException { + if (docValues.advanceExact(docId)) { + long millis = (long)docValues.doubleValue(); + joda.setMillis(millis); + return function.applyAsInt(joda); + } else { + return 0; + } + } }; } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java index b4c8582e0d6..aa12560e1fc 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/EmptyMemberValueSource.java @@ -19,10 +19,6 @@ package org.elasticsearch.script.expression; -import java.io.IOException; -import java.util.Map; -import java.util.Objects; - import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.queries.function.FunctionValues; import org.apache.lucene.queries.function.ValueSource; @@ -31,6 +27,10 @@ import org.elasticsearch.index.fielddata.AtomicNumericFieldData; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.fielddata.SortedNumericDoubleValues; +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + /** * ValueSource to return non-zero if a field is missing. *

@@ -50,12 +50,11 @@ final class EmptyMemberValueSource extends ValueSource { final SortedNumericDoubleValues values = leafData.getDoubleValues(); return new DoubleDocValues(this) { @Override - public double doubleVal(int doc) { - values.setDocument(doc); - if (values.count() == 0) { - return 1; - } else { + public double doubleVal(int doc) throws IOException { + if (values.advanceExact(doc)) { return 0; + } else { + return 1; } } }; diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java index 3ac885e49dc..f2aaced3765 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/FieldDataValueSource.java @@ -71,8 +71,12 @@ class FieldDataValueSource extends ValueSource { NumericDoubleValues docValues = multiValueMode.select(leafData.getDoubleValues(), 0d); return new DoubleDocValues(this) { @Override - public double doubleVal(int doc) { - return docValues.get(doc); + public double doubleVal(int doc) throws IOException { + if (docValues.advanceExact(doc)) { + return docValues.doubleValue(); + } else { + return 0; + } } }; } diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java index 2a9b09ba10d..0b16aaf9dcd 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoEmptyValueSource.java @@ -48,9 +48,8 @@ final class GeoEmptyValueSource extends ValueSource { final MultiGeoPointValues values = leafData.getGeoPointValues(); return new DoubleDocValues(this) { @Override - public double 
doubleVal(int doc) { - values.setDocument(doc); - if (values.count() == 0) { + public double doubleVal(int doc) throws IOException { + if (values.advanceExact(doc)) { return 1; } else { return 0; diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java index d23eceda2fe..fd812dac5a3 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLatitudeValueSource.java @@ -48,12 +48,11 @@ final class GeoLatitudeValueSource extends ValueSource { final MultiGeoPointValues values = leafData.getGeoPointValues(); return new DoubleDocValues(this) { @Override - public double doubleVal(int doc) { - values.setDocument(doc); - if (values.count() == 0) { - return 0.0; + public double doubleVal(int doc) throws IOException { + if (values.advanceExact(doc)) { + return values.nextValue().getLat(); } else { - return values.valueAt(0).getLat(); + return 0.0; } } }; diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java index 4fbc8fd936c..fd05d92d623 100644 --- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java +++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/GeoLongitudeValueSource.java @@ -48,12 +48,11 @@ final class GeoLongitudeValueSource extends ValueSource { final MultiGeoPointValues values = leafData.getGeoPointValues(); return new DoubleDocValues(this) { @Override - public double doubleVal(int doc) { - values.setDocument(doc); - if (values.count() == 0) { - return 0.0; + public double doubleVal(int doc) throws IOException { + if 
(values.advanceExact(doc)) { + return values.nextValue().getLon(); } else { - return values.valueAt(0).getLon(); + return 0.0; } } }; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java index 848b882d1ab..c4f321c2ea8 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQuery.java @@ -74,9 +74,9 @@ final class PercolateQuery extends Query implements Accountable { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) throws IOException { - final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, false); - final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, false); + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) throws IOException { + final Weight verifiedMatchesWeight = verifiedMatchesQuery.createWeight(searcher, false, boost); + final Weight candidateMatchesWeight = candidateMatchesQuery.createWeight(searcher, false, boost); return new Weight(this) { @Override public void extractTerms(Set set) { @@ -104,16 +104,6 @@ final class PercolateQuery extends Query implements Accountable { return Explanation.noMatch("PercolateQuery"); } - @Override - public float getValueForNormalization() throws IOException { - return candidateMatchesWeight.getValueForNormalization(); - } - - @Override - public void normalize(float v, float v1) { - candidateMatchesWeight.normalize(v, v1); - } - @Override public Scorer scorer(LeafReaderContext leafReaderContext) throws IOException { final Scorer approximation = candidateMatchesWeight.scorer(leafReaderContext); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java 
b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index d548a3f354a..9ebb65c3275 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -33,7 +33,6 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.Weight; import org.apache.lucene.store.RAMDirectory; -import org.apache.lucene.util.Bits; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ResourceNotFoundException; @@ -485,10 +484,9 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder null; } - Bits bits = leafReader.getDocsWithField(fieldType.queryBuilderField.name()); return docId -> { - if (bits.get(docId)) { - BytesRef qbSource = binaryDocValues.get(docId); + if (binaryDocValues.advanceExact(docId)) { + BytesRef qbSource = binaryDocValues.binaryValue(); if (qbSource.length > 0) { XContent xContent = PercolatorFieldMapper.QUERY_BUILDER_CONTENT_TYPE.xContent(); try (XContentParser sourceParser = xContent.createParser(context.getXContentRegistry(), qbSource.bytes, diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java index 65005e957ac..8c594a0545f 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/CandidateQueryTests.java @@ -79,6 +79,7 @@ import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Set; import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; @@ -245,7 +246,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { 
addQuery(commonTermsQuery, documents); BlendedTermQuery blendedTermQuery = BlendedTermQuery.booleanBlendedQuery(new Term[]{new Term("field", "quick"), - new Term("field", "brown"), new Term("field", "fox")}, false); + new Term("field", "brown"), new Term("field", "fox")}); addQuery(blendedTermQuery, documents); SpanNearQuery spanNearQuery = new SpanNearQuery.Builder("field", true) @@ -360,11 +361,14 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { } @Override - public Weight createWeight(IndexSearcher searcher, boolean needsScores) { - return new ConstantScoreWeight(this) { + public Weight createWeight(IndexSearcher searcher, boolean needsScores, float boost) { + return new Weight(this) { float _score; + @Override + public void extractTerms(Set terms) {} + @Override public Explanation explain(LeafReaderContext context, int doc) throws IOException { Scorer scorer = scorer(context); @@ -406,7 +410,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase { } } }; - return new FilterScorer(new ConstantScoreScorer(this, score(), memoryIndexIterator)) { + return new FilterScorer(new ConstantScoreScorer(this, 1f, memoryIndexIterator)) { @Override public float score() throws IOException { diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index 558d237f125..ea18bd38d12 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -276,7 +276,7 @@ public class QueryAnalyzerTests extends ESTestCase { public void testExtractQueryMetadata_blendedTermQuery() { Term[] termsArr = new Term[]{new Term("_field", "_term1"), new Term("_field", "_term2")}; - BlendedTermQuery commonTermsQuery = BlendedTermQuery.booleanBlendedQuery(termsArr, false); + BlendedTermQuery commonTermsQuery = 
BlendedTermQuery.booleanBlendedQuery(termsArr); Result result = analyze(commonTermsQuery); assertThat(result.verified, is(true)); List terms = new ArrayList<>(result.terms); diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0.jar.sha1 deleted file mode 100644 index 95df77a7521..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3a71465f63887f871bc377d87a0838c29b0a857d \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.0.0-snapshot-89f6d17.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..1ac5e494f00 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +e6e6d743c100e4d7bc55480d5333d634e41856ca \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0.jar.sha1 deleted file mode 100644 index 0c928699fc6..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -03353b0d030f6d5a63c4c0d5b64c770f5ba9d829 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.0.0-snapshot-89f6d17.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..8be206e2c1a --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +e520a9c7f8a2fc9f7c575940d9b24834a592ca25 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0.jar.sha1 deleted file 
mode 100644 index ba2bee28476..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -77ce4fb8c62688d8a094f08a07685c464ec46345 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.0.0-snapshot-89f6d17.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..2b69050029b --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +32fe29deb1c10cb7ae70d5f4d95fcc414f9813d6 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0.jar.sha1 deleted file mode 100644 index 0a0ae4cf401..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -60a780d900e48b0cead42d82fe405ad54bd658c3 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.0.0-snapshot-89f6d17.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..5412498f1c8 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +e50f4ab0d6ebf85b282a86707309343e3260c4a2 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0.jar.sha1 deleted file mode 100644 index 9a1387fa22f..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -894c42c011d291e72d14db660499c75281de9efd \ No newline at end of file diff --git 
a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.0.0-snapshot-89f6d17.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..c845088ffb7 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +a41c22ef7dd43991e1f3555ff527ac79eb47fdca \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0.jar.sha1 deleted file mode 100644 index 89a0283d52e..00000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-6.5.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -72f0172cf947ab563a7c8166855cf7cbdfe33136 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.0.0-snapshot-89f6d17.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.0.0-snapshot-89f6d17.jar.sha1 new file mode 100644 index 00000000000..973047cdf89 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.0.0-snapshot-89f6d17.jar.sha1 @@ -0,0 +1 @@ +01fe11b45d9f6a68ef1e9994bebd81d26632efc5 \ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml index d48d5088756..42189883b1b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search_shards/10_basic.yaml @@ -76,6 +76,5 @@ - lte: { indices.test_index.filter.bool.should.1.term.field.boost: 1.0 } - gte: { indices.test_index.filter.bool.should.1.term.field.boost: 1.0 } - match: { indices.test_index.filter.bool.adjust_pure_negative: true} - - match: { indices.test_index.filter.bool.disable_coord: false} - lte: { 
indices.test_index.filter.bool.boost: 1.0 } - gte: { indices.test_index.filter.bool.boost: 1.0 } diff --git a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java index 76d33a6c425..83f955296b7 100644 --- a/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/AnalysisFactoryTestCase.java @@ -285,6 +285,8 @@ public class AnalysisFactoryTestCase extends ESTestCase { .put("fingerprint", Void.class) // for tee-sinks .put("daterecognizer", Void.class) + // for token filters that generate bad offsets, which are now rejected since Lucene 7 + .put("fixbrokenoffsets", Void.class) .immutableMap(); diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 146e62cc617..fab12d2606b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -37,7 +37,6 @@ import org.apache.logging.log4j.core.config.Configurator; import org.apache.logging.log4j.status.StatusConsoleListener; import org.apache.logging.log4j.status.StatusData; import org.apache.logging.log4j.status.StatusLogger; -import org.apache.lucene.uninverting.UninvertingReader; import org.apache.lucene.util.LuceneTestCase; import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.apache.lucene.util.TestRuleMarkFailure; @@ -387,9 +386,6 @@ public abstract class ESTestCase extends LuceneTestCase { protected static void checkStaticState() throws Exception { MockPageCacheRecycler.ensureAllPagesAreReleased(); MockBigArrays.ensureAllArraysAreReleased(); - // field cache should NEVER get loaded. 
- String[] entries = UninvertingReader.getUninvertedStats(); - assertEquals("fieldcache must never be used, got=" + Arrays.toString(entries), 0, entries.length); // ensure no one changed the status logger level on us assertThat(StatusLogger.getLogger().getLevel(), equalTo(Level.WARN)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java b/test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java index 5ce620166c1..3aba68868f7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java +++ b/test/framework/src/main/java/org/elasticsearch/test/FieldMaskingReader.java @@ -21,7 +21,9 @@ package org.elasticsearch.test; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.FieldFilterLeafReader; import org.apache.lucene.index.FilterDirectoryReader; +import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.IndexReader.CacheHelper; import java.io.IOException; import java.util.Collections; @@ -32,7 +34,24 @@ public class FieldMaskingReader extends FilterDirectoryReader { super(in, new FilterDirectoryReader.SubReaderWrapper() { @Override public LeafReader wrap(LeafReader reader) { - return new FieldFilterLeafReader(reader, Collections.singleton(field), true); + return new FilterLeafReader(new FieldFilterLeafReader(reader, Collections.singleton(field), true)) { + + // FieldFilterLeafReader does not forward cache helpers + // since it considers it is illegal because of the fact + // that it changes the content of the index. 
However we + // want this behavior for tests, and security plugins + // are careful to only use the cache when it's valid + + @Override + public CacheHelper getReaderCacheHelper() { + return reader.getReaderCacheHelper(); + } + + @Override + public CacheHelper getCoreCacheHelper() { + return reader.getCoreCacheHelper(); + } + }; } }); this.field = field; @@ -45,7 +64,7 @@ public class FieldMaskingReader extends FilterDirectoryReader { } @Override - public Object getCoreCacheKey() { - return in.getCoreCacheKey(); + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); } } \ No newline at end of file diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java index a7fc61e6913..4c5bd0d3267 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockEngineSupport.java @@ -180,11 +180,6 @@ public final class MockEngineSupport { this.subReaderWrapper = subReaderWrapper; } - @Override - public Object getCoreCacheKey() { - return in.getCoreCacheKey(); - } - } public Engine.Searcher wrapSearcher(String source, Engine.Searcher engineSearcher, IndexSearcher searcher, SearcherManager manager) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java index 9da5d7515c7..314f1b52852 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/ThrowingLeafReaderWrapper.java @@ -195,4 +195,15 @@ public class ThrowingLeafReaderWrapper extends FilterLeafReader { thrower.maybeThrow(Flags.Norms); return super.getNormValues(field); } + + + @Override + public CacheHelper getCoreCacheHelper() { + 
return in.getCoreCacheHelper(); + } + + @Override + public CacheHelper getReaderCacheHelper() { + return in.getReaderCacheHelper(); + } } From cee76295ca0f360f02f6d2878d183659475eae4e Mon Sep 17 00:00:00 2001 From: Suhas Karanth Date: Tue, 18 Apr 2017 19:27:50 +0530 Subject: [PATCH 02/19] Update aggs reference documentation for 'keyed' options (#23758) Add 'keyed' parameter documentation for following: - Date Histogram Aggregation - Date Range Aggregation - Geo Distance Aggregation - Histogram Aggregation - IP range aggregation - Percentiles Aggregation - Percentile Ranks Aggregation --- .../bucket/datehistogram-aggregation.asciidoc | 54 +++++++++ .../bucket/daterange-aggregation.asciidoc | 106 ++++++++++++++++ .../bucket/geodistance-aggregation.asciidoc | 113 ++++++++++++++++++ .../bucket/iprange-aggregation.asciidoc | 90 ++++++++++++++ .../metrics/percentile-aggregation.asciidoc | 71 +++++++++++ .../percentile-rank-aggregation.asciidoc | 48 ++++++++ 6 files changed, 482 insertions(+) diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index d2ae9052f35..b7619b175df 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -302,6 +302,60 @@ documents into buckets starting at 6am: NOTE: The start `offset` of each bucket is calculated after the `time_zone` adjustments have been made. 
+==== Keyed Response + +Setting the `keyed` flag to `true` will associate a unique string key with each bucket and return the ranges as a hash rather than an array: + +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sales_over_time" : { + "date_histogram" : { + "field" : "date", + "interval" : "1M", + "format" : "yyyy-MM-dd", + "keyed": true + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +Response: + +[source,js] +-------------------------------------------------- +{ + ... + "aggregations": { + "sales_over_time": { + "buckets": { + "2015-01-01": { + "key_as_string": "2015-01-01", + "key": 1420070400000, + "doc_count": 3 + }, + "2015-02-01": { + "key_as_string": "2015-02-01", + "key": 1422748800000, + "doc_count": 2 + }, + "2015-03-01": { + "key_as_string": "2015-03-01", + "key": 1425168000000, + "doc_count": 2 + } + } + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] + ==== Scripts Like with the normal <>, both document level scripts and diff --git a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc index df32075583f..42c64f23cd3 100644 --- a/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/daterange-aggregation.asciidoc @@ -153,3 +153,109 @@ POST /sales/_search?size=0 <1> This date will be converted to `2016-02-15T00:00:00.000+01:00`. <2> `now/d` will be rounded to the beginning of the day in the CET time zone. 
+ +==== Keyed Response + +Setting the `keyed` flag to `true` will associate a unique string key with each bucket and return the ranges as a hash rather than an array: + +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs": { + "range": { + "date_range": { + "field": "date", + "format": "MM-yyy", + "ranges": [ + { "to": "now-10M/M" }, + { "from": "now-10M/M" } + ], + "keyed": true + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales s/now-10M\/M/10-2015/] + +Response: + +[source,js] +-------------------------------------------------- +{ + ... + "aggregations": { + "range": { + "buckets": { + "*-10-2015": { + "to": 1.4436576E12, + "to_as_string": "10-2015", + "doc_count": 7 + }, + "10-2015-*": { + "from": 1.4436576E12, + "from_as_string": "10-2015", + "doc_count": 0 + } + } + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] + +It is also possible to customize the key for each range: + +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs": { + "range": { + "date_range": { + "field": "date", + "format": "MM-yyy", + "ranges": [ + { "from": "01-2015", "to": "03-2015", "key": "quarter_01" }, + { "from": "03-2015", "to": "06-2015", "key": "quarter_02" } + ], + "keyed": true + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +Response: + +[source,js] +-------------------------------------------------- +{ + ... 
+ "aggregations": { + "range": { + "buckets": { + "quarter_01": { + "from": 1.4200704E12, + "from_as_string": "01-2015", + "to": 1.425168E12, + "to_as_string": "03-2015", + "doc_count": 5 + }, + "quarter_02": { + "from": 1.425168E12, + "from_as_string": "03-2015", + "to": 1.4331168E12, + "to_as_string": "06-2015", + "doc_count": 2 + } + } + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] diff --git a/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc b/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc index d3d13d4ac6f..c60f8413e46 100644 --- a/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/geodistance-aggregation.asciidoc @@ -142,3 +142,116 @@ POST /museums/_search?size=0 -------------------------------------------------- // CONSOLE // TEST[continued] + +==== Keyed Response + +Setting the `keyed` flag to `true` will associate a unique string key with each bucket and return the ranges as a hash rather than an array: + +[source,js] +-------------------------------------------------- +POST /museums/_search?size=0 +{ + "aggs" : { + "rings_around_amsterdam" : { + "geo_distance" : { + "field" : "location", + "origin" : "52.3760, 4.894", + "ranges" : [ + { "to" : 100000 }, + { "from" : 100000, "to" : 300000 }, + { "from" : 300000 } + ], + "keyed": true + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +Response: + +[source,js] +-------------------------------------------------- +{ + ... 
+ "aggregations": { + "rings_around_amsterdam" : { + "buckets": { + "*-100000.0": { + "from": 0.0, + "to": 100000.0, + "doc_count": 3 + }, + "100000.0-300000.0": { + "from": 100000.0, + "to": 300000.0, + "doc_count": 1 + }, + "300000.0-*": { + "from": 300000.0, + "doc_count": 2 + } + } + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] + +It is also possible to customize the key for each range: + +[source,js] +-------------------------------------------------- +POST /museums/_search?size=0 +{ + "aggs" : { + "rings_around_amsterdam" : { + "geo_distance" : { + "field" : "location", + "origin" : "52.3760, 4.894", + "ranges" : [ + { "to" : 100000, "key": "first_ring" }, + { "from" : 100000, "to" : 300000, "key": "second_ring" }, + { "from" : 300000, "key": "third_ring" } + ], + "keyed": true + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +Response: + +[source,js] +-------------------------------------------------- +{ + ... 
+ "aggregations": { + "rings_around_amsterdam" : { + "buckets": { + "first_ring": { + "from": 0.0, + "to": 100000.0, + "doc_count": 3 + }, + "second_ring": { + "from": 100000.0, + "to": 300000.0, + "doc_count": 1 + }, + "third_ring": { + "from": 300000.0, + "doc_count": 2 + } + } + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"_shards": $body._shards,"hits":$body.hits,"timed_out":false,/] + diff --git a/docs/reference/aggregations/bucket/iprange-aggregation.asciidoc b/docs/reference/aggregations/bucket/iprange-aggregation.asciidoc index bb20b18663e..ee77d57a0b4 100644 --- a/docs/reference/aggregations/bucket/iprange-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/iprange-aggregation.asciidoc @@ -90,3 +90,93 @@ Response: } } -------------------------------------------------- + +==== Keyed Response + +Setting the `keyed` flag to `true` will associate a unique string key with each bucket and return the ranges as a hash rather than an array: + +[source,js] +-------------------------------------------------- +{ + "aggs": { + "ip_ranges": { + "ip_range": { + "field": "remote_ip", + "ranges": [ + { "to" : "10.0.0.5" }, + { "from" : "10.0.0.5" } + ], + "keyed": true + } + } + } +} +-------------------------------------------------- + +Response: + +[source,js] +-------------------------------------------------- +{ + ... 
+ + "aggregations": { + "ip_ranges": { + "buckets": { + "*-10.0.0.5": { + "to": "10.0.0.5", + "doc_count": 1462 + }, + "10.0.0.5-*": { + "from": "10.0.0.5", + "doc_count": 50000 + } + } + } + } +} +-------------------------------------------------- + +It is also possible to customize the key for each range: + +[source,js] +-------------------------------------------------- +{ + "aggs": { + "ip_ranges": { + "ip_range": { + "field": "remote_ip", + "ranges": [ + { "key": "infinity", "to" : "10.0.0.5" }, + { "key": "and-beyond", "from" : "10.0.0.5" } + ], + "keyed": true + } + } + } +} +-------------------------------------------------- + +Response: + +[source,js] +-------------------------------------------------- +{ + ... + + "aggregations": { + "ip_ranges": { + "buckets": { + "infinity": { + "to": "10.0.0.5", + "doc_count": 1462 + }, + "and-beyond": { + "from": "10.0.0.5", + "doc_count": 50000 + } + } + } + } +} +-------------------------------------------------- diff --git a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc index db15d0a6a66..a9f49aecccf 100644 --- a/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/percentile-aggregation.asciidoc @@ -86,7 +86,78 @@ must be a value between 0-100 inclusive): -------------------------------------------------- <1> Use the `percents` parameter to specify particular percentiles to calculate +==== Keyed Response +By default the `keyed` flag is set to `true` which associates a unique string key with each bucket and returns the ranges as a hash rather than an array. 
Setting the `keyed` flag to `false` will disable this behavior: + +[source,js] +-------------------------------------------------- +POST bank/account/_search?size=0 +{ + "aggs": { + "balance_outlier": { + "percentiles": { + "field": "balance", + "keyed": false + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:bank] + +Response: + +[source,js] +-------------------------------------------------- +{ + ... + + "aggregations": { + "balance_outlier": { + "values": [ + { + "key": 1.0, + "value": 1462.8400000000001 + }, + { + "key": 5.0, + "value": 3591.85 + }, + { + "key": 25.0, + "value": 13709.333333333334 + }, + { + "key": 50.0, + "value": 26020.11666666667 + }, + { + "key": 75.0, + "value": 38139.648148148146 + }, + { + "key": 95.0, + "value": 47551.549999999996 + }, + { + "key": 99.0, + "value": 49339.16 + } + ] + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] +// TESTRESPONSE[s/1462.8400000000001/$body.aggregations.balance_outlier.values.0.value/] +// TESTRESPONSE[s/3591.85/$body.aggregations.balance_outlier.values.1.value/] +// TESTRESPONSE[s/13709.333333333334/$body.aggregations.balance_outlier.values.2.value/] +// TESTRESPONSE[s/26020.11666666667/$body.aggregations.balance_outlier.values.3.value/] +// TESTRESPONSE[s/38139.648148148146/$body.aggregations.balance_outlier.values.4.value/] +// TESTRESPONSE[s/47551.549999999996/$body.aggregations.balance_outlier.values.5.value/] +// TESTRESPONSE[s/49339.16/$body.aggregations.balance_outlier.values.6.value/] ==== Script diff --git a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc index d4df92105de..75e8ca35868 100644 --- a/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc +++ 
b/docs/reference/aggregations/metrics/percentile-rank-aggregation.asciidoc @@ -58,6 +58,54 @@ The response will look like this: From this information you can determine you are hitting the 99% load time target but not quite hitting the 95% load time target +==== Keyed Response + +By default the `keyed` flag is set to `true` associates a unique string key with each bucket and returns the ranges as a hash rather than an array. Setting the `keyed` flag to `false` will disable this behavior: + +[source,js] +-------------------------------------------------- +POST bank/account/_search?size=0 +{ + "aggs": { + "balance_outlier": { + "percentile_ranks": { + "field": "balance", + "values": [25000, 50000], + "keyed": false + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:bank] + +Response: + +[source,js] +-------------------------------------------------- +{ + ... + + "aggregations": { + "balance_outlier": { + "values": [ + { + "key": 25000.0, + "value": 48.537724935732655 + }, + { + "key": 50000.0, + "value": 99.85567010309278 + } + ] + } + } +} +-------------------------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] +// TESTRESPONSE[s/48.537724935732655/$body.aggregations.balance_outlier.values.0.value/] +// TESTRESPONSE[s/99.85567010309278/$body.aggregations.balance_outlier.values.1.value/] ==== Script From 8f540346a964df13570fe80439bed0e33eddc0a5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 18 Apr 2017 16:26:25 +0200 Subject: [PATCH 03/19] Tests: Fixing typo in class name of InternalGlobalTests Renaming from InternalGlogbalTests -> InternalGlobalTests --- .../{InternalGlogbalTests.java => InternalGlobalTests.java} | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/{InternalGlogbalTests.java => InternalGlobalTests.java} (94%) 
diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlogbalTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java similarity index 94% rename from core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlogbalTests.java rename to core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java index 0b55123a1ef..9092c3e0280 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlogbalTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/global/InternalGlobalTests.java @@ -27,7 +27,7 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import java.util.List; import java.util.Map; -public class InternalGlogbalTests extends InternalSingleBucketAggregationTestCase { +public class InternalGlobalTests extends InternalSingleBucketAggregationTestCase { @Override protected InternalGlobal createTestInstance(String name, long docCount, InternalAggregations aggregations, List pipelineAggregators, Map metaData) { From 0b15fde27a071037885c8d644b06d607ebe77428 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 18 Apr 2017 10:39:42 -0400 Subject: [PATCH 04/19] Start on custom whitelists for Painless (#23563) We'd like to be able to support context-sensitive whitelists in Painless but we can't now because the whitelist is a static thing. This begins to de-static the whitelist, in particular removing the static keyword from most of the methods on `Definition` and plumbing the static instance into the appropriate spots as though it weren't static. Once we de-static all the methods we should be able to fairly simply build context-sensitive whitelists. The only "fun" bit of this is that I added another layer in the chain of methods that bootstraps `def` calls. 
Instead of running `invokedynamic` directly on `DefBootstrap` we now `invokedynamic` `$bootstrapDef` on the script itself loads the `Definition` that the script was compiled against and then calls `DefBootstrap`. I chose to put `Definition` into `Locals` so I didn't have to change the signature of all the `analyze` methods. I could have do it another way, but that seems ok for now. --- .../org/elasticsearch/painless/Compiler.java | 17 ++- .../java/org/elasticsearch/painless/Def.java | 52 ++++---- .../elasticsearch/painless/DefBootstrap.java | 26 ++-- .../elasticsearch/painless/Definition.java | 85 ++++++------- .../elasticsearch/painless/FunctionRef.java | 12 +- .../org/elasticsearch/painless/Locals.java | 25 ++-- .../painless/PainlessExplainError.java | 4 +- .../painless/ScriptInterface.java | 17 +-- .../painless/WriterConstants.java | 21 ++-- .../painless/antlr/EnhancedPainlessLexer.java | 9 +- .../elasticsearch/painless/antlr/Walker.java | 16 ++- .../painless/node/ECapturingFunctionRef.java | 8 +- .../painless/node/EExplicit.java | 5 +- .../painless/node/EFunctionRef.java | 4 +- .../painless/node/EInstanceof.java | 2 +- .../elasticsearch/painless/node/ELambda.java | 4 +- .../painless/node/EListInit.java | 6 +- .../elasticsearch/painless/node/EMapInit.java | 6 +- .../painless/node/ENewArray.java | 13 +- .../elasticsearch/painless/node/ENewObj.java | 2 +- .../elasticsearch/painless/node/EStatic.java | 6 +- .../painless/node/PCallInvoke.java | 3 +- .../painless/node/PSubBrace.java | 2 +- .../elasticsearch/painless/node/SCatch.java | 3 +- .../painless/node/SDeclaration.java | 3 +- .../elasticsearch/painless/node/SEach.java | 3 +- .../painless/node/SFunction.java | 6 +- .../elasticsearch/painless/node/SSource.java | 27 +++- .../painless/node/SSubEachArray.java | 9 +- .../painless/node/SSubEachIterable.java | 8 +- .../elasticsearch/painless/DebugTests.java | 14 ++- .../painless/DefBootstrapTests.java | 115 +++++++++++------- .../painless/ScriptTestCase.java | 6 +- 
.../painless/node/NodeToStringTests.java | 21 ++-- 34 files changed, 318 insertions(+), 242 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 56c1b2e0ba7..9961dcbe156 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -100,15 +100,18 @@ final class Compiler { " characters. The passed in script is " + source.length() + " characters. Consider using a" + " plugin if a script longer than this length is a requirement."); } - ScriptInterface scriptInterface = new ScriptInterface(iface); + Definition definition = Definition.BUILTINS; + ScriptInterface scriptInterface = new ScriptInterface(definition, iface); - SSource root = Walker.buildPainlessTree(scriptInterface, name, source, settings, null); + SSource root = Walker.buildPainlessTree(scriptInterface, name, source, settings, definition, + null); - root.analyze(); + root.analyze(definition); root.write(); try { Class clazz = loader.define(CLASS_NAME, root.getBytes()); + clazz.getField("$DEFINITION").set(null, definition); java.lang.reflect.Constructor constructor = clazz.getConstructor(String.class, String.class, BitSet.class); @@ -131,11 +134,13 @@ final class Compiler { " characters. The passed in script is " + source.length() + " characters. 
Consider using a" + " plugin if a script longer than this length is a requirement."); } - ScriptInterface scriptInterface = new ScriptInterface(iface); + Definition definition = Definition.BUILTINS; + ScriptInterface scriptInterface = new ScriptInterface(definition, iface); - SSource root = Walker.buildPainlessTree(scriptInterface, name, source, settings, debugStream); + SSource root = Walker.buildPainlessTree(scriptInterface, name, source, settings, definition, + debugStream); - root.analyze(); + root.analyze(definition); root.write(); return root.getBytes(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 1438dab084f..5250238c817 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -175,17 +175,18 @@ public final class Def { * until it finds a matching whitelisted method. If one is not found, it throws an exception. * Otherwise it returns the matching method. *

+ * @params definition the whitelist * @param receiverClass Class of the object to invoke the method on. * @param name Name of the method. * @param arity arity of method * @return matching method to invoke. never returns null. * @throws IllegalArgumentException if no matching whitelisted method was found. */ - static Method lookupMethodInternal(Class receiverClass, String name, int arity) { + static Method lookupMethodInternal(Definition definition, Class receiverClass, String name, int arity) { Definition.MethodKey key = new Definition.MethodKey(name, arity); // check whitelist for matching method for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = Definition.getRuntimeClass(clazz); + RuntimeClass struct = definition.getRuntimeClass(clazz); if (struct != null) { Method method = struct.methods.get(key); @@ -195,7 +196,7 @@ public final class Def { } for (Class iface : clazz.getInterfaces()) { - struct = Definition.getRuntimeClass(iface); + struct = definition.getRuntimeClass(iface); if (struct != null) { Method method = struct.methods.get(key); @@ -220,6 +221,7 @@ public final class Def { * until it finds a matching whitelisted method. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching method. *

+ * @param definition the whitelist * @param lookup caller's lookup * @param callSiteType callsite's type * @param receiverClass Class of the object to invoke the method on. @@ -229,13 +231,13 @@ public final class Def { * @throws IllegalArgumentException if no matching whitelisted method was found. * @throws Throwable if a method reference cannot be converted to an functional interface */ - static MethodHandle lookupMethod(Lookup lookup, MethodType callSiteType, + static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodType callSiteType, Class receiverClass, String name, Object args[]) throws Throwable { String recipeString = (String) args[0]; int numArguments = callSiteType.parameterCount(); // simple case: no lambdas if (recipeString.isEmpty()) { - return lookupMethodInternal(receiverClass, name, numArguments - 1).handle; + return lookupMethodInternal(definition, receiverClass, name, numArguments - 1).handle; } // convert recipe string to a bitset for convenience (the code below should be refactored...) @@ -258,7 +260,7 @@ public final class Def { // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters). // based on these we can finally link any remaining lambdas that were deferred. - Method method = lookupMethodInternal(receiverClass, name, arity); + Method method = lookupMethodInternal(definition, receiverClass, name, arity); MethodHandle handle = method.handle; int replaced = 0; @@ -282,7 +284,8 @@ public final class Def { if (signature.charAt(0) == 'S') { // the implementation is strongly typed, now that we know the interface type, // we have everything. - filter = lookupReferenceInternal(lookup, + filter = lookupReferenceInternal(definition, + lookup, interfaceType, type, call, @@ -292,7 +295,8 @@ public final class Def { // this is dynamically based on the receiver type (and cached separately, underneath // this cache). 
It won't blow up since we never nest here (just references) MethodType nestedType = MethodType.methodType(interfaceType.clazz, captures); - CallSite nested = DefBootstrap.bootstrap(lookup, + CallSite nested = DefBootstrap.bootstrap(definition, + lookup, call, nestedType, 0, @@ -319,21 +323,23 @@ public final class Def { * This is just like LambdaMetaFactory, only with a dynamic type. The interface type is known, * so we simply need to lookup the matching implementation method based on receiver type. */ - static MethodHandle lookupReference(Lookup lookup, String interfaceClass, - Class receiverClass, String name) throws Throwable { - Definition.Type interfaceType = Definition.getType(interfaceClass); + static MethodHandle lookupReference(Definition definition, Lookup lookup, String interfaceClass, + Class receiverClass, String name) throws Throwable { + Definition.Type interfaceType = definition.getType(interfaceClass); Method interfaceMethod = interfaceType.struct.getFunctionalMethod(); if (interfaceMethod == null) { throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface"); } int arity = interfaceMethod.arguments.size(); - Method implMethod = lookupMethodInternal(receiverClass, name, arity); - return lookupReferenceInternal(lookup, interfaceType, implMethod.owner.name, implMethod.name, receiverClass); + Method implMethod = lookupMethodInternal(definition, receiverClass, name, arity); + return lookupReferenceInternal(definition, lookup, interfaceType, implMethod.owner.name, + implMethod.name, receiverClass); } /** Returns a method handle to an implementation of clazz, given method reference signature. */ - private static MethodHandle lookupReferenceInternal(Lookup lookup, Definition.Type clazz, String type, - String call, Class... captures) throws Throwable { + private static MethodHandle lookupReferenceInternal(Definition definition, Lookup lookup, + Definition.Type clazz, String type, String call, Class... 
captures) + throws Throwable { final FunctionRef ref; if ("this".equals(type)) { // user written method @@ -361,7 +367,7 @@ public final class Def { ref = new FunctionRef(clazz, interfaceMethod, handle, captures.length); } else { // whitelist lookup - ref = new FunctionRef(clazz, type, call, captures.length); + ref = new FunctionRef(definition, clazz, type, call, captures.length); } final CallSite callSite; if (ref.needsBridges()) { @@ -411,15 +417,16 @@ public final class Def { * until it finds a matching whitelisted getter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching getter. *

+ * @param definition the whitelist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupGetter(Class receiverClass, String name) { + static MethodHandle lookupGetter(Definition definition, Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = Definition.getRuntimeClass(clazz); + RuntimeClass struct = definition.getRuntimeClass(clazz); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -429,7 +436,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = Definition.getRuntimeClass(iface); + struct = definition.getRuntimeClass(iface); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -481,15 +488,16 @@ public final class Def { * until it finds a matching whitelisted setter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching setter. *

+ * @param definition the whitelist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupSetter(Class receiverClass, String name) { + static MethodHandle lookupSetter(Definition definition, Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - RuntimeClass struct = Definition.getRuntimeClass(clazz); + RuntimeClass struct = definition.getRuntimeClass(clazz); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -499,7 +507,7 @@ public final class Def { } for (final Class iface : clazz.getInterfaces()) { - struct = Definition.getRuntimeClass(iface); + struct = definition.getRuntimeClass(iface); if (struct != null) { MethodHandle handle = struct.setters.get(name); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java index 307316efdf4..31fba8f7579 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java @@ -104,17 +104,19 @@ public final class DefBootstrap { /** maximum number of types before we go megamorphic */ static final int MAX_DEPTH = 5; + private final Definition definition; private final Lookup lookup; private final String name; private final int flavor; private final Object[] args; int depth; // pkg-protected for testing - PIC(Lookup lookup, String name, MethodType type, int initialDepth, int flavor, Object[] args) { + PIC(Definition definition, Lookup lookup, String name, MethodType type, int initialDepth, int flavor, Object[] args) { super(type); if (type.parameterType(0) != Object.class) { 
throw new BootstrapMethodError("The receiver type (1st arg) of invokedynamic descriptor must be Object."); } + this.definition = definition; this.lookup = lookup; this.name = name; this.flavor = flavor; @@ -142,11 +144,11 @@ public final class DefBootstrap { private MethodHandle lookup(int flavor, String name, Class receiver) throws Throwable { switch(flavor) { case METHOD_CALL: - return Def.lookupMethod(lookup, type(), receiver, name, args); + return Def.lookupMethod(definition, lookup, type(), receiver, name, args); case LOAD: - return Def.lookupGetter(receiver, name); + return Def.lookupGetter(definition, receiver, name); case STORE: - return Def.lookupSetter(receiver, name); + return Def.lookupSetter(definition, receiver, name); case ARRAY_LOAD: return Def.lookupArrayLoad(receiver); case ARRAY_STORE: @@ -154,7 +156,7 @@ public final class DefBootstrap { case ITERATOR: return Def.lookupIterator(receiver); case REFERENCE: - return Def.lookupReference(lookup, (String) args[0], receiver, name); + return Def.lookupReference(definition, lookup, (String) args[0], receiver, name); case INDEX_NORMALIZE: return Def.lookupIndexNormalize(receiver); default: throw new AssertionError(); @@ -237,7 +239,7 @@ public final class DefBootstrap { */ static final class MIC extends MutableCallSite { private boolean initialized; - + private final String name; private final int flavor; private final int flags; @@ -419,16 +421,18 @@ public final class DefBootstrap { /** * invokeDynamic bootstrap method *

- * In addition to ordinary parameters, we also take some static parameters: + * In addition to ordinary parameters, we also take some parameters defined at the call site: *

    *
  • {@code initialDepth}: initial call site depth. this is used to exercise megamorphic fallback. *
  • {@code flavor}: type of dynamic call it is (and which part of whitelist to look at). *
  • {@code args}: flavor-specific args. *
+ * And we take the {@link Definition} used to compile the script for whitelist checking. *

* see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic */ - public static CallSite bootstrap(Lookup lookup, String name, MethodType type, int initialDepth, int flavor, Object... args) { + public static CallSite bootstrap(Definition definition, Lookup lookup, String name, MethodType type, int initialDepth, int flavor, + Object... args) { // validate arguments switch(flavor) { // "function-call" like things get a polymorphic cache @@ -447,7 +451,7 @@ public final class DefBootstrap { if (args.length != numLambdas + 1) { throw new BootstrapMethodError("Illegal number of parameters: expected " + numLambdas + " references"); } - return new PIC(lookup, name, type, initialDepth, flavor, args); + return new PIC(definition, lookup, name, type, initialDepth, flavor, args); case LOAD: case STORE: case ARRAY_LOAD: @@ -457,7 +461,7 @@ public final class DefBootstrap { if (args.length > 0) { throw new BootstrapMethodError("Illegal static bootstrap parameters for flavor: " + flavor); } - return new PIC(lookup, name, type, initialDepth, flavor, args); + return new PIC(definition, lookup, name, type, initialDepth, flavor, args); case REFERENCE: if (args.length != 1) { throw new BootstrapMethodError("Invalid number of parameters for reference call"); @@ -465,7 +469,7 @@ public final class DefBootstrap { if (args[0] instanceof String == false) { throw new BootstrapMethodError("Illegal parameter for reference call: " + args[0]); } - return new PIC(lookup, name, type, initialDepth, flavor, args); + return new PIC(definition, lookup, name, type, initialDepth, flavor, args); // operators get monomorphic cache, with a generic impl for a fallback case UNARY_OPERATOR: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java index 16f0339677e..f8bee4e5cfc 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java @@ -65,33 +65,39 @@ public final class Definition { "java.util.stream.txt", "joda.time.txt")); - private static final Definition INSTANCE = new Definition(); + /** + * Whitelist that is "built in" to Painless and required by all scripts. + */ + public static final Definition BUILTINS = new Definition(); /** Some native types as constants: */ - public static final Type VOID_TYPE = getType("void"); - public static final Type BOOLEAN_TYPE = getType("boolean"); - public static final Type BOOLEAN_OBJ_TYPE = getType("Boolean"); - public static final Type BYTE_TYPE = getType("byte"); - public static final Type BYTE_OBJ_TYPE = getType("Byte"); - public static final Type SHORT_TYPE = getType("short"); - public static final Type SHORT_OBJ_TYPE = getType("Short"); - public static final Type INT_TYPE = getType("int"); - public static final Type INT_OBJ_TYPE = getType("Integer"); - public static final Type LONG_TYPE = getType("long"); - public static final Type LONG_OBJ_TYPE = getType("Long"); - public static final Type FLOAT_TYPE = getType("float"); - public static final Type FLOAT_OBJ_TYPE = getType("Float"); - public static final Type DOUBLE_TYPE = getType("double"); - public static final Type DOUBLE_OBJ_TYPE = getType("Double"); - public static final Type CHAR_TYPE = getType("char"); - public static final Type CHAR_OBJ_TYPE = getType("Character"); - public static final Type OBJECT_TYPE = getType("Object"); - public static final Type DEF_TYPE = getType("def"); - public static final Type NUMBER_TYPE = getType("Number"); - public static final Type STRING_TYPE = getType("String"); - public static final Type EXCEPTION_TYPE = getType("Exception"); - public static final Type PATTERN_TYPE = getType("Pattern"); - public static final Type MATCHER_TYPE = getType("Matcher"); + public static final Type VOID_TYPE = 
BUILTINS.getType("void"); + public static final Type BOOLEAN_TYPE = BUILTINS.getType("boolean"); + public static final Type BOOLEAN_OBJ_TYPE = BUILTINS.getType("Boolean"); + public static final Type BYTE_TYPE = BUILTINS.getType("byte"); + public static final Type BYTE_OBJ_TYPE = BUILTINS.getType("Byte"); + public static final Type SHORT_TYPE = BUILTINS.getType("short"); + public static final Type SHORT_OBJ_TYPE = BUILTINS.getType("Short"); + public static final Type INT_TYPE = BUILTINS.getType("int"); + public static final Type INT_OBJ_TYPE = BUILTINS.getType("Integer"); + public static final Type LONG_TYPE = BUILTINS.getType("long"); + public static final Type LONG_OBJ_TYPE = BUILTINS.getType("Long"); + public static final Type FLOAT_TYPE = BUILTINS.getType("float"); + public static final Type FLOAT_OBJ_TYPE = BUILTINS.getType("Float"); + public static final Type DOUBLE_TYPE = BUILTINS.getType("double"); + public static final Type DOUBLE_OBJ_TYPE = BUILTINS.getType("Double"); + public static final Type CHAR_TYPE = BUILTINS.getType("char"); + public static final Type CHAR_OBJ_TYPE = BUILTINS.getType("Character"); + public static final Type OBJECT_TYPE = BUILTINS.getType("Object"); + public static final Type DEF_TYPE = BUILTINS.getType("def"); + public static final Type NUMBER_TYPE = BUILTINS.getType("Number"); + public static final Type STRING_TYPE = BUILTINS.getType("String"); + public static final Type EXCEPTION_TYPE = BUILTINS.getType("Exception"); + public static final Type PATTERN_TYPE = BUILTINS.getType("Pattern"); + public static final Type MATCHER_TYPE = BUILTINS.getType("Matcher"); + public static final Type ITERATOR_TYPE = BUILTINS.getType("Iterator"); + public static final Type ARRAY_LIST_TYPE = BUILTINS.getType("ArrayList"); + public static final Type HASH_MAP_TYPE = BUILTINS.getType("HashMap"); public enum Sort { VOID( void.class , Void.class , null , 0 , true , false , false , false ), @@ -483,38 +489,27 @@ public final class Definition { } /** 
Returns whether or not a non-array type exists. */ - public static boolean isSimpleType(final String name) { - return INSTANCE.structsMap.containsKey(name); - } - - /** Returns whether or not a type exists without an exception. */ - public static boolean isType(final String name) { - try { - INSTANCE.getTypeInternal(name); - - return true; - } catch (IllegalArgumentException exception) { - return false; - } + public boolean isSimpleType(final String name) { + return BUILTINS.structsMap.containsKey(name); } /** Gets the type given by its name */ - public static Type getType(final String name) { - return INSTANCE.getTypeInternal(name); + public Type getType(final String name) { + return BUILTINS.getTypeInternal(name); } /** Creates an array type from the given Struct. */ - public static Type getType(final Struct struct, final int dimensions) { - return INSTANCE.getTypeInternal(struct, dimensions); + public Type getType(final Struct struct, final int dimensions) { + return BUILTINS.getTypeInternal(struct, dimensions); } - public static RuntimeClass getRuntimeClass(Class clazz) { - return INSTANCE.runtimeMap.get(clazz); + public RuntimeClass getRuntimeClass(Class clazz) { + return BUILTINS.runtimeMap.get(clazz); } /** Collection of all simple types. Used by {@code PainlessDocGenerator} to generate an API reference. 
*/ static Collection allSimpleTypes() { - return INSTANCE.simpleTypesMap.values(); + return BUILTINS.simpleTypesMap.values(); } // INTERNAL IMPLEMENTATION: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index d5e02e12058..dddd9166311 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -50,13 +50,16 @@ public class FunctionRef { /** * Creates a new FunctionRef, which will resolve {@code type::call} from the whitelist. + * @param definition the whitelist against which this script is being compiled * @param expected interface type to implement. * @param type the left hand side of a method reference expression * @param call the right hand side of a method reference expression * @param numCaptures number of captured arguments */ - public FunctionRef(Definition.Type expected, String type, String call, int numCaptures) { - this(expected, expected.struct.getFunctionalMethod(), lookup(expected, type, call, numCaptures > 0), numCaptures); + public FunctionRef(Definition definition, Definition.Type expected, String type, String call, + int numCaptures) { + this(expected, expected.struct.getFunctionalMethod(), + lookup(definition, expected, type, call, numCaptures > 0), numCaptures); } /** @@ -134,7 +137,8 @@ public class FunctionRef { /** * Looks up {@code type::call} from the whitelist, and returns a matching method. */ - private static Definition.Method lookup(Definition.Type expected, String type, String call, boolean receiverCaptured) { + private static Definition.Method lookup(Definition definition, Definition.Type expected, + String type, String call, boolean receiverCaptured) { // check its really a functional interface // for e.g. 
Comparable Method method = expected.struct.getFunctionalMethod(); @@ -144,7 +148,7 @@ public class FunctionRef { } // lookup requested method - Definition.Struct struct = Definition.getType(type).struct; + Definition.Struct struct = definition.getType(type).struct; final Definition.Method impl; // ctor ref if ("new".equals(call)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index 9bbe9d9def3..6eff9e3228b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -60,7 +60,7 @@ public final class Locals { */ public static Locals newLambdaScope(Locals programScope, Type returnType, List parameters, int captureCount, int maxLoopCounter) { - Locals locals = new Locals(programScope, returnType, KEYWORDS); + Locals locals = new Locals(programScope, programScope.definition, returnType, KEYWORDS); for (int i = 0; i < parameters.size(); i++) { Parameter parameter = parameters.get(i); // TODO: allow non-captures to be r/w: @@ -79,7 +79,7 @@ public final class Locals { /** Creates a new function scope inside the current scope */ public static Locals newFunctionScope(Locals programScope, Type returnType, List parameters, int maxLoopCounter) { - Locals locals = new Locals(programScope, returnType, KEYWORDS); + Locals locals = new Locals(programScope, programScope.definition, returnType, KEYWORDS); for (Parameter parameter : parameters) { locals.addVariable(parameter.location, parameter.type, parameter.name, false); } @@ -92,9 +92,10 @@ public final class Locals { /** Creates a new main method scope */ public static Locals newMainMethodScope(ScriptInterface scriptInterface, Locals programScope, int maxLoopCounter) { - Locals locals = new Locals(programScope, scriptInterface.getExecuteMethodReturnType(), KEYWORDS); + Locals locals = new 
 Locals(programScope, programScope.definition, + scriptInterface.getExecuteMethodReturnType(), KEYWORDS); // This reference. Internal use only. - locals.defineVariable(null, Definition.getType("Object"), THIS, true); + locals.defineVariable(null, programScope.definition.getType("Object"), THIS, true); // Method arguments for (MethodArgument arg : scriptInterface.getExecuteArguments()) { @@ -109,8 +110,8 @@ } /** Creates a new program scope: the list of methods. It is the parent for all methods */ - public static Locals newProgramScope(Collection methods) { - Locals locals = new Locals(null, null, null); + public static Locals newProgramScope(Definition definition, Collection methods) { + Locals locals = new Locals(null, definition, null, null); for (Method method : methods) { locals.addMethod(method); } @@ -178,8 +179,15 @@ return locals; } + /** Whitelist against which this script is being compiled. */ + public Definition getDefinition() { + return definition; + } + ///// private impl + /** Whitelist against which this script is being compiled. 
*/ + private final Definition definition; // parent scope private final Locals parent; // return type of this scope @@ -197,14 +205,15 @@ public final class Locals { * Create a new Locals */ private Locals(Locals parent) { - this(parent, parent.returnType, parent.keywords); + this(parent, parent.definition, parent.returnType, parent.keywords); } /** * Create a new Locals with specified return type */ - private Locals(Locals parent, Type returnType, Set keywords) { + private Locals(Locals parent, Definition definition, Type returnType, Set keywords) { this.parent = parent; + this.definition = definition; this.returnType = returnType; this.keywords = keywords; if (parent == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java index fff692fdb9f..291d852bdde 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java @@ -46,7 +46,7 @@ public class PainlessExplainError extends Error { /** * Headers to be added to the {@link ScriptException} for structured rendering. 
*/ - public Map> getHeaders() { + public Map> getHeaders(Definition definition) { Map> headers = new TreeMap<>(); String toString = "null"; String javaClassName = null; @@ -54,7 +54,7 @@ public class PainlessExplainError extends Error { if (objectToExplain != null) { toString = objectToExplain.toString(); javaClassName = objectToExplain.getClass().getName(); - Definition.RuntimeClass runtimeClass = Definition.getRuntimeClass(objectToExplain.getClass()); + Definition.RuntimeClass runtimeClass = definition.getRuntimeClass(objectToExplain.getClass()); if (runtimeClass != null) { painlessClassName = runtimeClass.getStruct().name; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInterface.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInterface.java index b8ab32d1c6d..28fa6fd4280 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInterface.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptInterface.java @@ -40,7 +40,7 @@ public class ScriptInterface { private final List executeArguments; private final List usesMethods; - public ScriptInterface(Class iface) { + public ScriptInterface(Definition definition, Class iface) { this.iface = iface; // Find the main method and the uses$argName methods @@ -77,7 +77,7 @@ public class ScriptInterface { } MethodType methodType = MethodType.methodType(executeMethod.getReturnType(), executeMethod.getParameterTypes()); this.executeMethod = new org.objectweb.asm.commons.Method(executeMethod.getName(), methodType.toMethodDescriptorString()); - executeMethodReturnType = definitionTypeForClass(executeMethod.getReturnType(), + executeMethodReturnType = definitionTypeForClass(definition, executeMethod.getReturnType(), componentType -> "Painless can only implement execute methods returning a whitelisted type but [" + iface.getName() + "#execute] returns [" + componentType.getName() + "] which isn't whitelisted."); @@ 
-91,7 +91,7 @@ public class ScriptInterface { + iface.getName() + "#execute] takes [1] argument."); } for (int arg = 0; arg < types.length; arg++) { - arguments.add(methodArgument(types[arg], argumentNamesConstant[arg])); + arguments.add(methodArgument(definition, types[arg], argumentNamesConstant[arg])); argumentNames.add(argumentNamesConstant[arg]); } this.executeArguments = unmodifiableList(arguments); @@ -164,13 +164,14 @@ public class ScriptInterface { } } - private static MethodArgument methodArgument(Class type, String argName) { - Definition.Type defType = definitionTypeForClass(type, componentType -> "[" + argName + "] is of unknown type [" + private MethodArgument methodArgument(Definition definition, Class type, String argName) { + Definition.Type defType = definitionTypeForClass(definition, type, componentType -> "[" + argName + "] is of unknown type [" + componentType.getName() + ". Painless interfaces can only accept arguments that are of whitelisted types."); return new MethodArgument(defType, argName); } - private static Definition.Type definitionTypeForClass(Class type, Function, String> unknownErrorMessageSource) { + private static Definition.Type definitionTypeForClass(Definition definition, Class type, + Function, String> unknownErrorMessageSource) { int dimensions = 0; Class componentType = type; while (componentType.isArray()) { @@ -181,13 +182,13 @@ public class ScriptInterface { if (componentType.equals(Object.class)) { struct = Definition.DEF_TYPE.struct; } else { - Definition.RuntimeClass runtimeClass = Definition.getRuntimeClass(componentType); + Definition.RuntimeClass runtimeClass = definition.getRuntimeClass(componentType); if (runtimeClass == null) { throw new IllegalArgumentException(unknownErrorMessageSource.apply(componentType)); } struct = runtimeClass.getStruct(); } - return Definition.getType(struct, dimensions); + return definition.getType(struct, dimensions); } private static String[] readArgumentNamesConstant(Class iface) { 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index 6ae637e59b1..f29afbb74e6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -65,7 +65,9 @@ public final class WriterConstants { public static final Type STACK_OVERFLOW_ERROR_TYPE = Type.getType(StackOverflowError.class); public static final Type EXCEPTION_TYPE = Type.getType(Exception.class); public static final Type PAINLESS_EXPLAIN_ERROR_TYPE = Type.getType(PainlessExplainError.class); - public static final Method PAINLESS_EXPLAIN_ERROR_GET_HEADERS_METHOD = getAsmMethod(Map.class, "getHeaders"); + public static final Method PAINLESS_EXPLAIN_ERROR_GET_HEADERS_METHOD = getAsmMethod(Map.class, "getHeaders", Definition.class); + + public static final Type DEFINITION_TYPE = Type.getType(Definition.class); public static final Type COLLECTIONS_TYPE = Type.getType(Collections.class); public static final Method EMPTY_MAP_METHOD = getAsmMethod(Map.class, "emptyMap"); @@ -83,6 +85,8 @@ public final class WriterConstants { public static final Method STRING_TO_CHAR = getAsmMethod(char.class, "StringTochar", String.class); public static final Method CHAR_TO_STRING = getAsmMethod(String.class, "charToString", char.class); + public static final Type OBJECT_ARRAY_TYPE = Type.getType("[Ljava/lang/Object;"); + public static final Type METHOD_HANDLE_TYPE = Type.getType(MethodHandle.class); public static final Type AUGMENTATION_TYPE = Type.getType(Augmentation.class); @@ -98,13 +102,14 @@ public final class WriterConstants { public static final Method MATCHER_MATCHES = getAsmMethod(boolean.class, "matches"); public static final Method MATCHER_FIND = getAsmMethod(boolean.class, "find"); - /** dynamic callsite bootstrap signature */ - static final MethodType 
DEF_BOOTSTRAP_TYPE = - MethodType.methodType(CallSite.class, MethodHandles.Lookup.class, String.class, MethodType.class, - int.class, int.class, Object[].class); - static final Handle DEF_BOOTSTRAP_HANDLE = - new Handle(Opcodes.H_INVOKESTATIC, Type.getInternalName(DefBootstrap.class), - "bootstrap", DEF_BOOTSTRAP_TYPE.toMethodDescriptorString(), false); + public static final Method DEF_BOOTSTRAP_METHOD = getAsmMethod(CallSite.class, "$bootstrapDef", MethodHandles.Lookup.class, + String.class, MethodType.class, int.class, int.class, Object[].class); + static final Handle DEF_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, CLASS_TYPE.getInternalName(), "$bootstrapDef", + DEF_BOOTSTRAP_METHOD.getDescriptor(), false); + public static final Type DEF_BOOTSTRAP_DELEGATE_TYPE = Type.getType(DefBootstrap.class); + public static final Method DEF_BOOTSTRAP_DELEGATE_METHOD = getAsmMethod(CallSite.class, "bootstrap", Definition.class, + MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); + public static final Type DEF_UTIL_TYPE = Type.getType(Def.class); public static final Method DEF_TO_BOOLEAN = getAsmMethod(boolean.class, "DefToboolean" , Object.class); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java index 640d9c29b20..506ac8fcdec 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java @@ -41,13 +41,16 @@ import org.elasticsearch.painless.Location; * */ final class EnhancedPainlessLexer extends PainlessLexer { - final String sourceName; + private final String sourceName; + private final Definition definition; + private Token stashedNext = null; private Token previous = null; - EnhancedPainlessLexer(CharStream 
charStream, String sourceName) { + EnhancedPainlessLexer(CharStream charStream, String sourceName, Definition definition) { super(charStream); this.sourceName = sourceName; + this.definition = definition; } public Token getPreviousToken() { @@ -93,7 +96,7 @@ final class EnhancedPainlessLexer extends PainlessLexer { @Override protected boolean isSimpleType(String name) { - return Definition.isSimpleType(name); + return definition.isSimpleType(name); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index 19d15a4beb3..51f37447821 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -29,10 +29,11 @@ import org.antlr.v4.runtime.Recognizer; import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.painless.CompilerSettings; +import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.ScriptInterface; import org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.ScriptInterface; import org.elasticsearch.painless.antlr.PainlessParser.AfterthoughtContext; import org.elasticsearch.painless.antlr.PainlessParser.ArgumentContext; import org.elasticsearch.painless.antlr.PainlessParser.ArgumentsContext; @@ -173,9 +174,11 @@ import java.util.List; */ public final class Walker extends PainlessParserBaseVisitor { - public static SSource buildPainlessTree(ScriptInterface mainMethod, String sourceName, String sourceText, CompilerSettings settings, + public static SSource buildPainlessTree(ScriptInterface mainMethod, String sourceName, + String sourceText, CompilerSettings settings, Definition definition, Printer debugStream) { - 
return new Walker(mainMethod, sourceName, sourceText, settings, debugStream).source; + return new Walker(mainMethod, sourceName, sourceText, settings, definition, + debugStream).source; } private final ScriptInterface scriptInterface; @@ -184,24 +187,27 @@ public final class Walker extends PainlessParserBaseVisitor { private final Printer debugStream; private final String sourceName; private final String sourceText; + private final Definition definition; private final Deque reserved = new ArrayDeque<>(); private final Globals globals; private int syntheticCounter = 0; - private Walker(ScriptInterface scriptInterface, String sourceName, String sourceText, CompilerSettings settings, Printer debugStream) { + private Walker(ScriptInterface scriptInterface, String sourceName, String sourceText, + CompilerSettings settings, Definition definition, Printer debugStream) { this.scriptInterface = scriptInterface; this.debugStream = debugStream; this.settings = settings; this.sourceName = Location.computeSourceName(sourceName, sourceText); this.sourceText = sourceText; this.globals = new Globals(new BitSet(sourceText.length())); + this.definition = definition; this.source = (SSource)visit(buildAntlrTree(sourceText)); } private SourceContext buildAntlrTree(String source) { ANTLRInputStream stream = new ANTLRInputStream(source); - PainlessLexer lexer = new EnhancedPainlessLexer(stream, sourceName); + PainlessLexer lexer = new EnhancedPainlessLexer(stream, sourceName, definition); PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer)); ParserErrorStrategy strategy = new ParserErrorStrategy(sourceName); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index d305bf08373..717d2a43b1e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -60,8 +60,8 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda } @Override - void analyze(Locals variables) { - captured = variables.getVariable(location, variable); + void analyze(Locals locals) { + captured = locals.getVariable(location, variable); if (expected == null) { if (captured.type.sort == Definition.Sort.DEF) { // dynamic implementation @@ -70,13 +70,13 @@ public final class ECapturingFunctionRef extends AExpression implements ILambda // typed implementation defPointer = "S" + captured.type.name + "." + call + ",1"; } - actual = Definition.getType("String"); + actual = locals.getDefinition().getType("String"); } else { defPointer = null; // static case if (captured.type.sort != Definition.Sort.DEF) { try { - ref = new FunctionRef(expected, captured.type.name, call, 1); + ref = new FunctionRef(locals.getDefinition(), expected, captured.type.name, call, 1); } catch (IllegalArgumentException e) { throw createError(e); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index f5e6f6f96b2..2624735aa07 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -19,10 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; -import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Locals; +import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import java.util.Objects; @@ -51,7 +50,7 @@ public final class EExplicit extends AExpression { @Override void analyze(Locals locals) { try { - actual = Definition.getType(this.type); + actual = 
locals.getDefinition().getType(type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 48230646b01..0fe11400269 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -59,7 +59,7 @@ public final class EFunctionRef extends AExpression implements ILambda { void analyze(Locals locals) { if (expected == null) { ref = null; - actual = Definition.getType("String"); + actual = locals.getDefinition().getType("String"); defPointer = "S" + type + "." + call + ",0"; } else { defPointer = null; @@ -79,7 +79,7 @@ public final class EFunctionRef extends AExpression implements ILambda { ref = new FunctionRef(expected, interfaceMethod, implMethod, 0); } else { // whitelist lookup - ref = new FunctionRef(expected, type, call, 0); + ref = new FunctionRef(locals.getDefinition(), expected, type, call, 0); } } catch (IllegalArgumentException e) { throw createError(e); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 9bdeea93fde..c9b2c95bb4c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -59,7 +59,7 @@ public final class EInstanceof extends AExpression { // ensure the specified type is part of the definition try { - type = Definition.getType(this.type); + type = locals.getDefinition().getType(this.type); } catch (IllegalArgumentException exception) { throw createError(new 
IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 4960c6520c0..ca086561d69 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -164,14 +164,14 @@ public final class ELambda extends AExpression implements ILambda { // desugar lambda body into a synthetic method desugared = new SFunction(reserved, location, returnType.name, name, paramTypes, paramNames, statements, true); - desugared.generateSignature(); + desugared.generateSignature(locals.getDefinition()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); // setup method reference to synthetic method if (expected == null) { ref = null; - actual = Definition.getType("String"); + actual = locals.getDefinition().getType("String"); defPointer = "Sthis." 
+ name + "," + captures.size(); } else { defPointer = null; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index eb6ff14fb8b..999d35551ce 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -58,11 +58,7 @@ public final class EListInit extends AExpression { throw createError(new IllegalArgumentException("Must read from list initializer.")); } - try { - actual = Definition.getType("ArrayList"); - } catch (IllegalArgumentException exception) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } + actual = Definition.ARRAY_LIST_TYPE; constructor = actual.struct.constructors.get(new MethodKey("", 0)); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index 1e437d0a71c..0647b5716e0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -64,11 +64,7 @@ public final class EMapInit extends AExpression { throw createError(new IllegalArgumentException("Must read from map initializer.")); } - try { - actual = Definition.getType("HashMap"); - } catch (IllegalArgumentException exception) { - throw createError(new IllegalStateException("Illegal tree structure.")); - } + actual = Definition.HASH_MAP_TYPE; constructor = actual.struct.constructors.get(new MethodKey("", 0)); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java index f19bff33e5d..d32a153b797 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -63,7 +63,7 @@ public final class ENewArray extends AExpression { final Type type; try { - type = Definition.getType(this.type); + type = locals.getDefinition().getType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } @@ -71,13 +71,14 @@ public final class ENewArray extends AExpression { for (int argument = 0; argument < arguments.size(); ++argument) { AExpression expression = arguments.get(argument); - expression.expected = initialize ? Definition.getType(type.struct, 0) : Definition.INT_TYPE; + expression.expected = initialize ? locals.getDefinition().getType(type.struct, 0) + : Definition.INT_TYPE; expression.internal = true; expression.analyze(locals); arguments.set(argument, expression.cast(locals)); } - actual = Definition.getType(type.struct, initialize ? 1 : arguments.size()); + actual = locals.getDefinition().getType(type.struct, initialize ? 
1 : arguments.size()); } @Override @@ -86,7 +87,7 @@ public final class ENewArray extends AExpression { if (initialize) { writer.push(arguments.size()); - writer.newArray(Definition.getType(actual.struct, 0).type); + writer.newArray(actual.struct.type); for (int index = 0; index < arguments.size(); ++index) { AExpression argument = arguments.get(index); @@ -94,7 +95,7 @@ public final class ENewArray extends AExpression { writer.dup(); writer.push(index); argument.write(writer, globals); - writer.arrayStore(Definition.getType(actual.struct, 0).type); + writer.arrayStore(actual.struct.type); } } else { for (AExpression argument : arguments) { @@ -104,7 +105,7 @@ public final class ENewArray extends AExpression { if (arguments.size() > 1) { writer.visitMultiANewArrayInsn(actual.type.getDescriptor(), actual.type.getDimensions()); } else { - writer.newArray(Definition.getType(actual.struct, 0).type); + writer.newArray(actual.struct.type); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index 7f2458a8dc1..08cd76917c5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -61,7 +61,7 @@ public final class ENewObj extends AExpression { final Type type; try { - type = Definition.getType(this.type); + type = locals.getDefinition().getType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java index 9db9f6f8cae..e675fb15108 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java @@ -19,16 +19,14 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Globals; +import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import java.util.Objects; import java.util.Set; -import org.elasticsearch.painless.Locals; - /** * Represents a static type target. */ @@ -50,7 +48,7 @@ public final class EStatic extends AExpression { @Override void analyze(Locals locals) { try { - actual = Definition.getType(type); + actual = locals.getDefinition().getType(type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 29369cd9456..80bdc3d597c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.MethodKey; import org.elasticsearch.painless.Definition.Sort; @@ -74,7 +73,7 @@ public final class PCallInvoke extends AExpression { Struct struct = prefix.actual.struct; if (prefix.actual.sort.primitive) { - struct = Definition.getType(prefix.actual.sort.boxed.getSimpleName()).struct; + struct = locals.getDefinition().getType(prefix.actual.sort.boxed.getSimpleName()).struct; } MethodKey methodKey = new MethodKey(name, arguments.size()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java index 1a12f23211e..1f3d4109bca 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java @@ -55,7 +55,7 @@ final class PSubBrace extends AStoreable { index.analyze(locals); index = index.cast(locals); - actual = Definition.getType(type.struct, type.dimensions - 1); + actual = locals.getDefinition().getType(type.struct, type.dimensions - 1); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index 90940ae81d0..6940e48342a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -69,7 +68,7 @@ public final class SCatch extends AStatement { final Type type; try { - type = Definition.getType(this.type); + type = locals.getDefinition().getType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index 57477561f7f..ab9e58db23e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import 
org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -64,7 +63,7 @@ public final class SDeclaration extends AStatement { final Type type; try { - type = Definition.getType(this.type); + type = locals.getDefinition().getType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index 04c9e6697cf..003f303b5e0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Sort; import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; @@ -72,7 +71,7 @@ public class SEach extends AStatement { final Type type; try { - type = Definition.getType(this.type); + type = locals.getDefinition().getType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 760b0d15d83..3ef20b023ce 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -106,9 +106,9 @@ public final class SFunction extends AStatement { throw new IllegalStateException("Illegal tree structure"); } - void generateSignature() { + void 
generateSignature(Definition definition) { try { - rtnType = Definition.getType(rtnTypeStr); + rtnType = definition.getType(rtnTypeStr); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "].")); } @@ -122,7 +122,7 @@ public final class SFunction extends AStatement { for (int param = 0; param < this.paramTypeStrs.size(); ++param) { try { - Type paramType = Definition.getType(this.paramTypeStrs.get(param)); + Type paramType = definition.getType(this.paramTypeStrs.get(param)); paramClasses[param] = paramType.clazz; paramTypes.add(paramType); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index 56e08b4ddf1..eb323e3293f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -21,6 +21,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Constant; +import org.elasticsearch.painless.Definition; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.MethodKey; import org.elasticsearch.painless.Globals; @@ -58,6 +59,10 @@ import static org.elasticsearch.painless.WriterConstants.CLASS_TYPE; import static org.elasticsearch.painless.WriterConstants.COLLECTIONS_TYPE; import static org.elasticsearch.painless.WriterConstants.CONSTRUCTOR; import static org.elasticsearch.painless.WriterConstants.CONVERT_TO_SCRIPT_EXCEPTION_METHOD; +import static org.elasticsearch.painless.WriterConstants.DEFINITION_TYPE; +import static org.elasticsearch.painless.WriterConstants.DEF_BOOTSTRAP_DELEGATE_METHOD; +import static org.elasticsearch.painless.WriterConstants.DEF_BOOTSTRAP_DELEGATE_TYPE; +import static 
org.elasticsearch.painless.WriterConstants.DEF_BOOTSTRAP_METHOD; import static org.elasticsearch.painless.WriterConstants.EMPTY_MAP_METHOD; import static org.elasticsearch.painless.WriterConstants.EXCEPTION_TYPE; import static org.elasticsearch.painless.WriterConstants.OUT_OF_MEMORY_ERROR_TYPE; @@ -145,11 +150,11 @@ public final class SSource extends AStatement { throw new IllegalStateException("Illegal tree structure."); } - public void analyze() { + public void analyze(Definition definition) { Map methods = new HashMap<>(); for (SFunction function : functions) { - function.generateSignature(); + function.generateSignature(definition); MethodKey key = new MethodKey(function.name, function.parameters.size()); @@ -158,7 +163,7 @@ public final class SSource extends AStatement { } } - analyze(Locals.newProgramScope(methods.values())); + analyze(Locals.newProgramScope(definition, methods.values())); } @Override @@ -216,6 +221,19 @@ public final class SSource extends AStatement { visitor.visit(WriterConstants.CLASS_VERSION, classAccess, className, null, classBase, classInterfaces); visitor.visitSource(Location.computeSourceName(name, source), null); + // Write the a method to bootstrap def calls + MethodWriter bootstrapDef = new MethodWriter(Opcodes.ACC_STATIC | Opcodes.ACC_VARARGS, DEF_BOOTSTRAP_METHOD, visitor, + globals.getStatements(), settings); + bootstrapDef.visitCode(); + bootstrapDef.getStatic(CLASS_TYPE, "$DEFINITION", DEFINITION_TYPE); + bootstrapDef.loadArgs(); + bootstrapDef.invokeStatic(DEF_BOOTSTRAP_DELEGATE_TYPE, DEF_BOOTSTRAP_DELEGATE_METHOD); + bootstrapDef.returnValue(); + bootstrapDef.endMethod(); + + // Write the static variable used by the method to bootstrap def calls + visitor.visitField(Opcodes.ACC_PUBLIC | Opcodes.ACC_STATIC, "$DEFINITION", DEFINITION_TYPE.getDescriptor(), null, null).visitEnd(); + // Write the constructor: MethodWriter constructor = new MethodWriter(Opcodes.ACC_PUBLIC, CONSTRUCTOR, visitor, globals.getStatements(), settings); 
constructor.visitCode(); @@ -330,13 +348,14 @@ public final class SSource extends AStatement { writer.goTo(endCatch); // This looks like: // } catch (PainlessExplainError e) { - // throw this.convertToScriptException(e, e.getHeaders()) + // throw this.convertToScriptException(e, e.getHeaders($DEFINITION)) // } writer.visitTryCatchBlock(startTry, endTry, startExplainCatch, PAINLESS_EXPLAIN_ERROR_TYPE.getInternalName()); writer.mark(startExplainCatch); writer.loadThis(); writer.swap(); writer.dup(); + writer.getStatic(CLASS_TYPE, "$DEFINITION", DEFINITION_TYPE); writer.invokeVirtual(PAINLESS_EXPLAIN_ERROR_TYPE, PAINLESS_EXPLAIN_ERROR_GET_HEADERS_METHOD); writer.invokeVirtual(BASE_CLASS_TYPE, CONVERT_TO_SCRIPT_EXCEPTION_METHOD); writer.throwException(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index c153eacbf92..2083d3ddbe5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -64,9 +64,12 @@ final class SSubEachArray extends AStatement { void analyze(Locals locals) { // We must store the array and index as variables for securing slots on the stack, and // also add the location offset to make the names unique in case of nested for each loops. 
- array = locals.addVariable(location, expression.actual, "#array" + location.getOffset(), true); - index = locals.addVariable(location, Definition.INT_TYPE, "#index" + location.getOffset(), true); - indexed = Definition.getType(expression.actual.struct, expression.actual.dimensions - 1); + array = locals.addVariable(location, expression.actual, "#array" + location.getOffset(), + true); + index = locals.addVariable(location, Definition.INT_TYPE, "#index" + location.getOffset(), + true); + indexed = locals.getDefinition().getType(expression.actual.struct, + expression.actual.dimensions - 1); cast = AnalyzerCaster.getLegalCast(location, indexed, variable.type, true, true); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index 845cc264530..b014e952e32 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -26,7 +26,6 @@ import org.elasticsearch.painless.Definition.Cast; import org.elasticsearch.painless.Definition.Method; import org.elasticsearch.painless.Definition.MethodKey; import org.elasticsearch.painless.Definition.Sort; -import org.elasticsearch.painless.Definition.Type; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -72,7 +71,8 @@ final class SSubEachIterable extends AStatement { void analyze(Locals locals) { // We must store the iterator as a variable for securing a slot on the stack, and // also add the location offset to make the name unique in case of nested for each loops. 
- iterator = locals.addVariable(location, Definition.getType("Iterator"), "#itr" + location.getOffset(), true); + iterator = locals.addVariable(location, locals.getDefinition().getType("Iterator"), + "#itr" + location.getOffset(), true); if (expression.actual.sort == Sort.DEF) { method = null; @@ -95,8 +95,8 @@ final class SSubEachIterable extends AStatement { expression.write(writer, globals); if (method == null) { - Type itr = Definition.getType("Iterator"); - org.objectweb.asm.Type methodType = org.objectweb.asm.Type.getMethodType(itr.type, Definition.DEF_TYPE.type); + org.objectweb.asm.Type methodType = org.objectweb.asm.Type + .getMethodType(Definition.ITERATOR_TYPE.type, Definition.DEF_TYPE.type); writer.invokeDefCall("iterator", methodType, DefBootstrap.ITERATOR); } else { method.write(writer); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java index 0c96251a51a..c1098a1e7af 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java @@ -34,22 +34,24 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; public class DebugTests extends ScriptTestCase { + private final Definition definition = Definition.BUILTINS; + public void testExplain() { // Debug.explain can explain an object Object dummy = new Object(); PainlessExplainError e = expectScriptThrows(PainlessExplainError.class, () -> exec( "Debug.explain(params.a)", singletonMap("a", dummy), true)); assertSame(dummy, e.getObjectToExplain()); - assertThat(e.getHeaders(), hasEntry("es.to_string", singletonList(dummy.toString()))); - assertThat(e.getHeaders(), hasEntry("es.java_class", singletonList("java.lang.Object"))); - assertThat(e.getHeaders(), hasEntry("es.painless_class", singletonList("Object"))); + 
assertThat(e.getHeaders(definition), hasEntry("es.to_string", singletonList(dummy.toString()))); + assertThat(e.getHeaders(definition), hasEntry("es.java_class", singletonList("java.lang.Object"))); + assertThat(e.getHeaders(definition), hasEntry("es.painless_class", singletonList("Object"))); // Null should be ok e = expectScriptThrows(PainlessExplainError.class, () -> exec("Debug.explain(null)")); assertNull(e.getObjectToExplain()); - assertThat(e.getHeaders(), hasEntry("es.to_string", singletonList("null"))); - assertThat(e.getHeaders(), not(hasKey("es.java_class"))); - assertThat(e.getHeaders(), not(hasKey("es.painless_class"))); + assertThat(e.getHeaders(definition), hasEntry("es.to_string", singletonList("null"))); + assertThat(e.getHeaders(definition), not(hasKey("es.java_class"))); + assertThat(e.getHeaders(definition), not(hasKey("es.painless_class"))); // You can't catch the explain exception e = expectScriptThrows(PainlessExplainError.class, () -> exec( diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index 68d184cb173..bc7e4b5ebf7 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -30,14 +30,17 @@ import java.util.HashMap; import org.elasticsearch.test.ESTestCase; public class DefBootstrapTests extends ESTestCase { + private final Definition definition = Definition.BUILTINS; /** calls toString() on integers, twice */ public void testOneType() throws Throwable { - CallSite site = DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "toString", - MethodType.methodType(String.class, Object.class), - 0, - DefBootstrap.METHOD_CALL, ""); + CallSite site = DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "toString", + MethodType.methodType(String.class, 
Object.class), + 0, + DefBootstrap.METHOD_CALL, + ""); MethodHandle handle = site.dynamicInvoker(); assertDepthEquals(site, 0); @@ -51,11 +54,13 @@ public class DefBootstrapTests extends ESTestCase { } public void testTwoTypes() throws Throwable { - CallSite site = DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "toString", - MethodType.methodType(String.class, Object.class), - 0, - DefBootstrap.METHOD_CALL, ""); + CallSite site = DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "toString", + MethodType.methodType(String.class, Object.class), + 0, + DefBootstrap.METHOD_CALL, + ""); MethodHandle handle = site.dynamicInvoker(); assertDepthEquals(site, 0); @@ -74,11 +79,13 @@ public class DefBootstrapTests extends ESTestCase { public void testTooManyTypes() throws Throwable { // if this changes, test must be rewritten assertEquals(5, DefBootstrap.PIC.MAX_DEPTH); - CallSite site = DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "toString", - MethodType.methodType(String.class, Object.class), - 0, - DefBootstrap.METHOD_CALL, ""); + CallSite site = DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "toString", + MethodType.methodType(String.class, Object.class), + 0, + DefBootstrap.METHOD_CALL, + ""); MethodHandle handle = site.dynamicInvoker(); assertDepthEquals(site, 0); @@ -98,11 +105,13 @@ public class DefBootstrapTests extends ESTestCase { /** test that we revert to the megamorphic classvalue cache and that it works as expected */ public void testMegamorphic() throws Throwable { - DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap(MethodHandles.publicLookup(), + DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), "size", MethodType.methodType(int.class, Object.class), 0, - DefBootstrap.METHOD_CALL, ""); + DefBootstrap.METHOD_CALL, + ""); site.depth = DefBootstrap.PIC.MAX_DEPTH; // mark megamorphic MethodHandle handle = site.dynamicInvoker(); 
assertEquals(2, (int)handle.invokeExact((Object) Arrays.asList("1", "2"))); @@ -128,43 +137,51 @@ public class DefBootstrapTests extends ESTestCase { // test operators with null guards public void testNullGuardAdd() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL); MethodHandle handle = site.dynamicInvoker(); assertEquals("nulltest", (Object)handle.invokeExact((Object)null, (Object)"test")); } public void testNullGuardAddWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL); MethodHandle handle = site.dynamicInvoker(); assertEquals(2, (Object)handle.invokeExact((Object)1, (Object)1)); assertEquals("nulltest", (Object)handle.invokeExact((Object)null, (Object)"test")); } public void testNullGuardEq() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "eq", - MethodType.methodType(boolean.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) 
DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "eq", + MethodType.methodType(boolean.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL); MethodHandle handle = site.dynamicInvoker(); assertFalse((boolean) handle.invokeExact((Object)null, (Object)"test")); assertTrue((boolean) handle.invokeExact((Object)null, (Object)null)); } public void testNullGuardEqWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "eq", - MethodType.methodType(boolean.class, Object.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "eq", + MethodType.methodType(boolean.class, Object.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + DefBootstrap.OPERATOR_ALLOWS_NULL); MethodHandle handle = site.dynamicInvoker(); assertTrue((boolean) handle.invokeExact((Object)1, (Object)1)); assertFalse((boolean) handle.invokeExact((Object)null, (Object)"test")); @@ -176,11 +193,13 @@ public class DefBootstrapTests extends ESTestCase { // and can be disabled in some circumstances. 
public void testNoNullGuardAdd() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, int.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, 0); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, int.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + 0); MethodHandle handle = site.dynamicInvoker(); expectThrows(NullPointerException.class, () -> { assertNotNull((Object)handle.invokeExact(5, (Object)null)); @@ -188,11 +207,13 @@ public class DefBootstrapTests extends ESTestCase { } public void testNoNullGuardAddWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(MethodHandles.publicLookup(), - "add", - MethodType.methodType(Object.class, int.class, Object.class), - 0, - DefBootstrap.BINARY_OPERATOR, 0); + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + MethodHandles.publicLookup(), + "add", + MethodType.methodType(Object.class, int.class, Object.class), + 0, + DefBootstrap.BINARY_OPERATOR, + 0); MethodHandle handle = site.dynamicInvoker(); assertEquals(2, (Object)handle.invokeExact(1, (Object)1)); expectThrows(NullPointerException.class, () -> { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 544095caf9d..74c6c9a5628 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -76,11 +76,13 @@ public abstract class ScriptTestCase extends ESTestCase { public Object exec(String script, Map vars, Map compileParams, Scorer scorer, boolean picky) { // test for ambiguity errors before running the 
actual script if picky is true if (picky) { - ScriptInterface scriptInterface = new ScriptInterface(GenericElasticsearchScript.class); + Definition definition = Definition.BUILTINS; + ScriptInterface scriptInterface = new ScriptInterface(definition, GenericElasticsearchScript.class); CompilerSettings pickySettings = new CompilerSettings(); pickySettings.setPicky(true); pickySettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(scriptEngineSettings())); - Walker.buildPainlessTree(scriptInterface, getTestName(), script, pickySettings, null); + Walker.buildPainlessTree(scriptInterface, getTestName(), script, pickySettings, + definition, null); } // test actual script execution Object object = scriptEngine.compile(null, script, compileParams); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index a4530823c9e..df9d0c0f4ea 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -47,6 +47,8 @@ import static java.util.Collections.singletonList; * Tests {@link Object#toString} implementations on all extensions of {@link ANode}. 
*/ public class NodeToStringTests extends ESTestCase { + private final Definition definition = Definition.BUILTINS; + public void testEAssignment() { assertToString( "(SSource\n" @@ -399,7 +401,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubCallInvoke() { Location l = new Location(getTestName(), 0); - RuntimeClass c = Definition.getRuntimeClass(Integer.class); + RuntimeClass c = definition.getRuntimeClass(Integer.class); Method m = c.methods.get(new MethodKey("toString", 0)); PSubCallInvoke node = new PSubCallInvoke(l, m, null, emptyList()); node.prefix = new EVariable(l, "a"); @@ -454,7 +456,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubField() { Location l = new Location(getTestName(), 0); - Struct s = Definition.getType(Boolean.class.getSimpleName()).struct; + Struct s = definition.getType(Boolean.class.getSimpleName()).struct; Field f = s.staticMembers.get("TRUE"); PSubField node = new PSubField(l, f); node.prefix = new EStatic(l, "Boolean"); @@ -464,7 +466,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubListShortcut() { Location l = new Location(getTestName(), 0); - Struct s = Definition.getType(List.class.getSimpleName()).struct; + Struct s = definition.getType(List.class.getSimpleName()).struct; PSubListShortcut node = new PSubListShortcut(l, s, new EConstant(l, 1)); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EConstant Integer 1))", node.toString()); @@ -472,7 +474,7 @@ public class NodeToStringTests extends ESTestCase { new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 0); - s = Definition.getType(List.class.getSimpleName()).struct; + s = definition.getType(List.class.getSimpleName()).struct; node = new PSubListShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EBinary 
(EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); @@ -480,7 +482,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubMapShortcut() { Location l = new Location(getTestName(), 0); - Struct s = Definition.getType(Map.class.getSimpleName()).struct; + Struct s = definition.getType(Map.class.getSimpleName()).struct; PSubMapShortcut node = new PSubMapShortcut(l, s, new EConstant(l, "cat")); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) (EConstant String 'cat'))", node.toString()); @@ -488,7 +490,7 @@ public class NodeToStringTests extends ESTestCase { new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 1); - s = Definition.getType(Map.class.getSimpleName()).struct; + s = definition.getType(Map.class.getSimpleName()).struct; node = new PSubMapShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) (EBinary (EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); @@ -496,7 +498,7 @@ public class NodeToStringTests extends ESTestCase { public void testPSubShortcut() { Location l = new Location(getTestName(), 0); - Struct s = Definition.getType(FeatureTest.class.getName()).struct; + Struct s = definition.getType(FeatureTest.class.getName()).struct; Method getter = s.methods.get(new MethodKey("getX", 0)); Method setter = s.methods.get(new MethodKey("setX", 1)); PSubShortcut node = new PSubShortcut(l, "x", FeatureTest.class.getName(), getter, setter); @@ -896,11 +898,12 @@ public class NodeToStringTests extends ESTestCase { } private SSource walk(String code) { - ScriptInterface scriptInterface = new ScriptInterface(GenericElasticsearchScript.class); + ScriptInterface scriptInterface = new ScriptInterface(definition, GenericElasticsearchScript.class); CompilerSettings compilerSettings = new CompilerSettings(); 
compilerSettings.setRegexesEnabled(true); try { - return Walker.buildPainlessTree(scriptInterface, getTestName(), code, compilerSettings, null); + return Walker.buildPainlessTree(scriptInterface, getTestName(), code, compilerSettings, + definition, null); } catch (Exception e) { throw new AssertionError("Failed to compile: " + code, e); } From 829dd068d61da9ec48f9f6d60d8d3b96687f3559 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Tue, 18 Apr 2017 17:03:32 +0200 Subject: [PATCH 05/19] [Test] Use appropriate DocValueFormats in Aggregations tests (#24155) Some aggregations (like Min, Max etc) use a wrong DocValueFormat in tests (like IP or GeoHash). We should not test aggregations that expect a numeric value with a DocValueFormat like IP. Such wrong DocValueFormat can also prevent the aggregation to be rendered as ToXContent, and this will be an issue for the High Level Rest Client tests which expect to be able to parse back aggregations. --- .../elasticsearch/search/DocValueFormat.java | 17 +++++++++++ .../search/DocValueFormatTests.java | 30 +++++++++++++++++-- .../InternalAggregationTestCase.java | 13 ++++++++ ...ternalSingleBucketAggregationTestCase.java | 9 ++---- .../metrics/InternalMaxTests.java | 5 +--- .../metrics/avg/InternalAvgTests.java | 3 +- .../metrics/min/InternalMinTests.java | 4 +-- .../pipeline/InternalSimpleValueTests.java | 3 +- .../derivative/InternalDerivativeTests.java | 6 ++-- 9 files changed, 65 insertions(+), 25 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/DocValueFormat.java b/core/src/main/java/org/elasticsearch/search/DocValueFormat.java index 4c32667aa2a..eb76db3be68 100644 --- a/core/src/main/java/org/elasticsearch/search/DocValueFormat.java +++ b/core/src/main/java/org/elasticsearch/search/DocValueFormat.java @@ -393,5 +393,22 @@ public interface DocValueFormat extends NamedWriteable { public BytesRef parseBytesRef(String value) { throw new UnsupportedOperationException(); } + + @Override + public boolean 
equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + Decimal that = (Decimal) o; + return Objects.equals(pattern, that.pattern); + } + + @Override + public int hashCode() { + return Objects.hash(pattern); + } } } diff --git a/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java b/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java index 049e133b357..7bf5308eb63 100644 --- a/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java +++ b/core/src/test/java/org/elasticsearch/search/DocValueFormatTests.java @@ -19,9 +19,6 @@ package org.elasticsearch.search; -import java.util.ArrayList; -import java.util.List; - import org.apache.lucene.document.InetAddressPoint; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -34,6 +31,9 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTimeZone; +import java.util.ArrayList; +import java.util.List; + public class DocValueFormatTests extends ESTestCase { public void testSerialization() throws Exception { @@ -108,6 +108,18 @@ public class DocValueFormatTests extends ESTestCase { DocValueFormat.IP.format(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))))); } + public void testDecimalFormat() { + DocValueFormat formatter = new DocValueFormat.Decimal("###.##"); + assertEquals("0", formatter.format(0.0d)); + assertEquals("1", formatter.format(1d)); + formatter = new DocValueFormat.Decimal("000.000"); + assertEquals("-000.500", formatter.format(-0.5)); + formatter = new DocValueFormat.Decimal("###,###.###"); + assertEquals("0.86", formatter.format(0.8598023539251286d)); + formatter = new DocValueFormat.Decimal("###,###.###"); + assertEquals("859,802.354", formatter.format(0.8598023539251286d * 1_000_000)); + } + public void testRawParse() { assertEquals(-1L, 
DocValueFormat.RAW.parseLong("-1", randomBoolean(), null)); assertEquals(1L, DocValueFormat.RAW.parseLong("1", randomBoolean(), null)); @@ -145,4 +157,16 @@ public class DocValueFormatTests extends ESTestCase { assertEquals(new BytesRef(InetAddressPoint.encode(InetAddresses.forString("::1"))), DocValueFormat.IP.parseBytesRef("::1")); } + + public void testDecimalParse() { + DocValueFormat parser = new DocValueFormat.Decimal("###.##"); + assertEquals(0.0d, parser.parseDouble(randomFrom("0.0", "0", ".0", ".0000"), true, null), 0.0d); + assertEquals(-1.0d, parser.parseDouble(randomFrom("-1.0", "-1", "-1.0", "-1.0000"), true, null), 0.0d); + assertEquals(0.0d, parser.parseLong("0", true, null), 0.0d); + assertEquals(1.0d, parser.parseLong("1", true, null), 0.0d); + parser = new DocValueFormat.Decimal("###,###.###"); + assertEquals(859802.354d, parser.parseDouble("859,802.354", true, null), 0.0d); + assertEquals(0.859d, parser.parseDouble("0.859", true, null), 0.0d); + assertEquals(0.8598023539251286d, parser.parseDouble("0.8598023539251286", true, null), 0.0d); + } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java index f899fde087c..02abb394d0f 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/InternalAggregationTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; import org.elasticsearch.test.AbstractWireSerializingTestCase; @@ -33,6 +34,7 @@ import 
java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Supplier; import static java.util.Collections.emptyList; @@ -122,4 +124,15 @@ public abstract class InternalAggregationTestCase protected NamedWriteableRegistry getNamedWriteableRegistry() { return namedWriteableRegistry; } + + /** + * @return a random {@link DocValueFormat} that can be used in aggregations which + * compute numbers. + */ + protected static DocValueFormat randomNumericDocValueFormat() { + final List> formats = new ArrayList<>(3); + formats.add(() -> DocValueFormat.RAW); + formats.add(() -> new DocValueFormat.Decimal(randomFrom("###.##", "###,###.##"))); + return randomFrom(formats).get(); + } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregationTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregationTestCase.java index 6f8dac7eec4..5d2e8affe78 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregationTestCase.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/InternalSingleBucketAggregationTestCase.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.aggregations.bucket; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregationTestCase; import org.elasticsearch.search.aggregations.InternalAggregations; @@ -47,14 +46,10 @@ public abstract class InternalSingleBucketAggregationTestCase pipelineAggregators, Map metaData) { List internal = new ArrayList<>(); if (hasInternalMax) { - internal.add(new InternalMax("max", randomDouble(), - randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), emptyList(), - emptyMap())); + internal.add(new InternalMax("max", randomDouble(), 
randomNumericDocValueFormat(), emptyList(), emptyMap())); } if (hasInternalMin) { - internal.add(new InternalMin("min", randomDouble(), - randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), emptyList(), - emptyMap())); + internal.add(new InternalMin("min", randomDouble(), randomNumericDocValueFormat(), emptyList(), emptyMap())); } // we shouldn't use the full long range here since we sum doc count on reduce, and don't want to overflow the long range there long docCount = between(0, Integer.MAX_VALUE); diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java index de045ff533e..e2149b24549 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalMaxTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.search.aggregations.metrics; import org.elasticsearch.common.io.stream.Writeable.Reader; -import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.InternalAggregationTestCase; import org.elasticsearch.search.aggregations.metrics.max.InternalMax; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -31,9 +30,7 @@ import java.util.Map; public class InternalMaxTests extends InternalAggregationTestCase { @Override protected InternalMax createTestInstance(String name, List pipelineAggregators, Map metaData) { - return new InternalMax(name, randomDouble(), - randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators, - metaData); + return new InternalMax(name, randomDouble(), randomNumericDocValueFormat(), pipelineAggregators, metaData); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java 
b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java index 0600d7299b4..1cfb93d6dcc 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/avg/InternalAvgTests.java @@ -32,8 +32,7 @@ public class InternalAvgTests extends InternalAggregationTestCase { @Override protected InternalAvg createTestInstance(String name, List pipelineAggregators, Map metaData) { return new InternalAvg(name, randomDoubleBetween(0, 100000, true), randomNonNegativeLong() % 100000, - randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators, - metaData); + randomNumericDocValueFormat(), pipelineAggregators, metaData); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/InternalMinTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/InternalMinTests.java index f93e7c5c81e..d75f03a5de3 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/InternalMinTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/min/InternalMinTests.java @@ -30,9 +30,7 @@ import java.util.Map; public class InternalMinTests extends InternalAggregationTestCase { @Override protected InternalMin createTestInstance(String name, List pipelineAggregators, Map metaData) { - return new InternalMin(name, randomDouble(), - randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, DocValueFormat.IP, DocValueFormat.RAW), pipelineAggregators, - metaData); + return new InternalMin(name, randomDouble(), randomNumericDocValueFormat(), pipelineAggregators, metaData); } @Override diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java index 
afb5d869d0e..4dae0a8d653 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/InternalSimpleValueTests.java @@ -32,8 +32,7 @@ public class InternalSimpleValueTests extends InternalAggregationTestCase pipelineAggregators, Map metaData) { - DocValueFormat formatter = randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, - DocValueFormat.IP, DocValueFormat.RAW); + DocValueFormat formatter = randomNumericDocValueFormat(); double value = randomDoubleBetween(0, 100000, true); return new InternalSimpleValue(name, value, formatter, pipelineAggregators, metaData); } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java index 1889723fa8a..d2e4f3cb720 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/derivative/InternalDerivativeTests.java @@ -33,12 +33,10 @@ public class InternalDerivativeTests extends InternalAggregationTestCase pipelineAggregators, Map metaData) { - DocValueFormat formatter = randomFrom(DocValueFormat.BOOLEAN, DocValueFormat.GEOHASH, - DocValueFormat.IP, DocValueFormat.RAW); + DocValueFormat formatter = randomNumericDocValueFormat(); double value = randomDoubleBetween(0, 100000, true); double normalizationFactor = randomDoubleBetween(0, 100000, true); - return new InternalDerivative(name, value, normalizationFactor, formatter, - pipelineAggregators, metaData); + return new InternalDerivative(name, value, normalizationFactor, formatter, pipelineAggregators, metaData); } @Override From ab9884b2e9108e25bcc56c54c9cfe9e84385dbca Mon Sep 17 00:00:00 2001 From: Simon Willnauer Date: Tue, 18 Apr 2017 17:19:57 +0200 Subject: 
[PATCH 06/19] Remove leniency when merging fetched hits in a search response phase (#24158) Today when we merge hits we have a hard check to prevent AIOOB exceptions that simply skips an expected search hit. This can only happen if there is a bug in the code which should be turned into a hard exception or an assertion triggered. This change adds an assertion and removes the lenient check for the fetched hits. --- .../action/search/SearchPhaseController.java | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 13b4b2f73d4..e1e0205e7e5 100644 --- a/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/core/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -328,15 +328,15 @@ public final class SearchPhaseController extends AbstractComponent { continue; } FetchSearchResult fetchResult = searchResultProvider.fetchResult(); - int fetchResultIndex = fetchResult.counterGetAndIncrement(); - if (fetchResultIndex < fetchResult.hits().internalHits().length) { - SearchHit hit = fetchResult.hits().internalHits()[fetchResultIndex]; - CompletionSuggestion.Entry.Option suggestOption = - suggestionOptions.get(scoreDocIndex - currentOffset); - hit.score(shardDoc.score); - hit.shard(fetchResult.getSearchShardTarget()); - suggestOption.setHit(hit); - } + final int index = fetchResult.counterGetAndIncrement(); + assert index < fetchResult.hits().internalHits().length : "not enough hits fetched. 
index [" + index + "] length: " + + fetchResult.hits().internalHits().length; + SearchHit hit = fetchResult.hits().internalHits()[index]; + CompletionSuggestion.Entry.Option suggestOption = + suggestionOptions.get(scoreDocIndex - currentOffset); + hit.score(shardDoc.score); + hit.shard(fetchResult.getSearchShardTarget()); + suggestOption.setHit(hit); } currentOffset += suggestionOptions.size(); } @@ -380,20 +380,20 @@ public final class SearchPhaseController extends AbstractComponent { continue; } FetchSearchResult fetchResult = fetchResultProvider.fetchResult(); - int index = fetchResult.counterGetAndIncrement(); - if (index < fetchResult.hits().internalHits().length) { - SearchHit searchHit = fetchResult.hits().internalHits()[index]; - searchHit.score(shardDoc.score); - searchHit.shard(fetchResult.getSearchShardTarget()); - if (sorted) { - FieldDoc fieldDoc = (FieldDoc) shardDoc; - searchHit.sortValues(fieldDoc.fields, reducedQueryPhase.sortValueFormats); - if (sortScoreIndex != -1) { - searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue()); - } + final int index = fetchResult.counterGetAndIncrement(); + assert index < fetchResult.hits().internalHits().length : "not enough hits fetched. 
index [" + index + "] length: " + + fetchResult.hits().internalHits().length; + SearchHit searchHit = fetchResult.hits().internalHits()[index]; + searchHit.score(shardDoc.score); + searchHit.shard(fetchResult.getSearchShardTarget()); + if (sorted) { + FieldDoc fieldDoc = (FieldDoc) shardDoc; + searchHit.sortValues(fieldDoc.fields, reducedQueryPhase.sortValueFormats); + if (sortScoreIndex != -1) { + searchHit.score(((Number) fieldDoc.fields[sortScoreIndex]).floatValue()); } - hits.add(searchHit); } + hits.add(searchHit); } } return new SearchHits(hits.toArray(new SearchHit[hits.size()]), reducedQueryPhase.totalHits, From edff30f82a7b2297eb59b92cc858f7e083ee9d9b Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 18 Apr 2017 20:11:32 +0200 Subject: [PATCH 07/19] Engine: store maxUnsafeAutoIdTimestamp in commit (#24149) The `maxUnsafeAutoIdTimestamp` timestamp is a safety marker guaranteeing that no retried-indexing operation with a higher auto gen id timestamp was processed by the engine. This allows us to safely process documents without checking if they were seen before. Currently this property is maintained in memory and is handed off from the primary to any replica during the recovery process. This commit takes a more natural approach and stores it in the lucene commit, using the same semantics (no retry op with a higher time stamp is part of this commit). This means that the knowledge is transferred during the file copy and also means that we don't need to worry about crazy situations where an original append only request arrives at the engine after a retry was processed *and* the engine was restarted.
--- .../index/engine/InternalEngine.java | 16 +++++++- .../index/engine/InternalEngineTests.java | 39 ++++++++++++++++--- 2 files changed, 49 insertions(+), 6 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 5e5b2ed3fec..0bed51e0e24 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -128,6 +128,7 @@ public class InternalEngine extends Engine { private final AtomicInteger throttleRequestCount = new AtomicInteger(); private final EngineConfig.OpenMode openMode; private final AtomicBoolean pendingTranslogRecovery = new AtomicBoolean(false); + private static final String MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID = "max_unsafe_auto_id_timestamp"; private final AtomicLong maxUnsafeAutoIdTimestamp = new AtomicLong(-1); private final CounterMetric numVersionLookups = new CounterMetric(); private final CounterMetric numIndexVersionsLookups = new CounterMetric(); @@ -178,6 +179,7 @@ public class InternalEngine extends Engine { } logger.trace("recovered [{}]", seqNoStats); seqNoService = sequenceNumberService(shardId, engineConfig.getIndexSettings(), seqNoStats); + updateMaxUnsafeAutoIdTimestampFromWriter(writer); // norelease /* * We have no guarantees that all operations above the local checkpoint are in the Lucene commit or the translog. 
This means @@ -226,6 +228,17 @@ public class InternalEngine extends Engine { logger.trace("created new InternalEngine"); } + private void updateMaxUnsafeAutoIdTimestampFromWriter(IndexWriter writer) { + long commitMaxUnsafeAutoIdTimestamp = Long.MIN_VALUE; + for (Map.Entry entry : writer.getLiveCommitData()) { + if (entry.getKey().equals(MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID)) { + commitMaxUnsafeAutoIdTimestamp = Long.parseLong(entry.getValue()); + break; + } + } + maxUnsafeAutoIdTimestamp.set(Math.max(maxUnsafeAutoIdTimestamp.get(), commitMaxUnsafeAutoIdTimestamp)); + } + private static SequenceNumbersService sequenceNumberService( final ShardId shardId, final IndexSettings indexSettings, @@ -500,7 +513,7 @@ public class InternalEngine extends Engine { return true; case LOCAL_TRANSLOG_RECOVERY: assert index.isRetry(); - return false; // even if retry is set we never optimize local recovery + return true; // allow to optimize in order to update the max safe time stamp default: throw new IllegalArgumentException("unknown origin " + index.origin()); } @@ -1770,6 +1783,7 @@ public class InternalEngine extends Engine { commitData.put(Engine.SYNC_COMMIT_ID, syncId); } commitData.put(SequenceNumbers.MAX_SEQ_NO, Long.toString(seqNoService().getMaxSeqNo())); + commitData.put(MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID, Long.toString(maxUnsafeAutoIdTimestamp.get())); logger.trace("committing writer with commit data [{}]", commitData); return commitData.entrySet().iterator(); }); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 3f9965c0662..71d754ddfb6 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -3223,11 +3223,40 @@ public class InternalEngineTests extends ESTestCase { } - long maxTimestamp = Math.abs(randomLong()); - try (Store 
store = createStore(); - Engine engine = new InternalEngine(config(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE, - maxTimestamp, null))) { - assertEquals(maxTimestamp, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + final long timestamp1 = Math.abs(randomLong()); + final Path storeDir = createTempDir(); + final Path translogDir = createTempDir(); + try (Store store = createStore(newFSDirectory(storeDir)); + Engine engine = new InternalEngine( + config(defaultSettings, store, translogDir, NoMergePolicy.INSTANCE, timestamp1, null))) { + assertEquals(timestamp1, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + } + final long timestamp2 = randomNonNegativeLong(); + final long timestamp3 = randomNonNegativeLong(); + final long maxTimestamp12 = Math.max(timestamp1, timestamp2); + final long maxTimestamp123 = Math.max(maxTimestamp12, timestamp3); + try (Store store = createStore(newFSDirectory(storeDir)); + Engine engine = new InternalEngine( + copy(config(defaultSettings, store, translogDir, NoMergePolicy.INSTANCE, timestamp2, null), + randomFrom(EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG, EngineConfig.OpenMode.OPEN_INDEX_CREATE_TRANSLOG)))) { + assertEquals(maxTimestamp12, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + if (engine.config().getOpenMode() == EngineConfig.OpenMode.OPEN_INDEX_AND_TRANSLOG) { + // recover from translog and commit maxTimestamp12 + engine.recoverFromTranslog(); + // force flush as the were no ops performed + engine.flush(true, false); + } + final ParsedDocument doc = testParsedDocument("1", "test", null, testDocumentWithTextField(), + new BytesArray("{}".getBytes(Charset.defaultCharset())), null); + engine.index(appendOnlyPrimary(doc, true, timestamp3)); + assertEquals(maxTimestamp123, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + } + try (Store store = createStore(newFSDirectory(storeDir)); + Engine engine = new InternalEngine( + config(defaultSettings, store, 
translogDir, NoMergePolicy.INSTANCE, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, null))) { + assertEquals(maxTimestamp12, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); + engine.recoverFromTranslog(); + assertEquals(maxTimestamp123, engine.segmentsStats(false).getMaxUnsafeAutoIdTimestamp()); } } From 212f24aa2778868dc2d65a986163d09cedb45aed Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 18 Apr 2017 15:07:08 -0700 Subject: [PATCH 08/19] Tests: Clean up rest test file handling (#21392) This change simplifies how the rest test runner finds test files and removes all leniency. Previously multiple prefixes and suffixes would be tried, and tests could exist inside or outside of the classpath, although outside of the classpath never quite worked. Now only classpath tests are supported, and only one resource prefix is supported, `/rest-api-spec/tests`. closes #20240 --- .../test/rest/DebClientYamlTestSuiteIT.java | 5 +- .../IntegTestZipClientYamlTestSuiteIT.java | 4 +- .../test/rest/RpmClientYamlTestSuiteIT.java | 4 +- .../test/rest/TarClientYamlTestSuiteIT.java | 4 +- .../test/rest/ZipClientYamlTestSuiteIT.java | 4 +- .../smoketest/DocsClientYamlTestSuiteIT.java | 3 +- .../MatrixStatsClientYamlTestSuiteIT.java | 4 +- .../IngestCommonClientYamlTestSuiteIT.java | 4 +- .../LangExpressionClientYamlTestSuiteIT.java | 4 +- .../LangMustacheClientYamlTestSuiteIT.java | 4 +- .../LangPainlessClientYamlTestSuiteIT.java | 4 +- .../PercolatorClientYamlTestSuiteIT.java | 5 +- .../reindex/ReindexClientYamlTestSuiteIT.java | 4 +- .../RepositoryURLClientYamlTestSuiteIT.java | 5 +- .../netty4/Netty4ClientYamlTestSuiteIT.java | 4 +- .../analysis/IcuClientYamlTestSuiteIT.java | 4 +- .../KuromojiClientYamlTestSuiteIT.java | 4 +- .../PhoneticClientYamlTestSuiteIT.java | 4 +- .../SmartCNClientYamlTestSuiteIT.java | 4 +- .../StempelClientYamlTestSuiteIT.java | 4 +- .../UkrainianClientYamlTestSuiteIT.java | 4 +- ...veryAzureClassicClientYamlTestSuiteIT.java | 4 +- 
.../ec2/CloudAwsClientYamlTestSuiteIT.java | 4 +- ...leBasedDiscoveryClientYamlTestSuiteIT.java | 5 +- .../DiscoveryGceClientYamlTestSuiteIT.java | 4 +- ...IngestAttachmentClientYamlTestSuiteIT.java | 4 +- .../IngestGeoIpClientYamlTestSuiteIT.java | 4 +- .../IngestUserAgentClientYamlTestSuiteIT.java | 4 +- .../JvmExampleClientYamlTestSuiteIT.java | 4 +- .../MapperMurmur3ClientYamlTestSuiteIT.java | 4 +- .../size/MapperSizeClientYamlTestSuiteIT.java | 4 +- .../RepositoryAzureClientYamlTestSuiteIT.java | 4 +- .../RepositoryGcsClientYamlTestSuiteIT.java | 4 +- .../RepositoryHdfsClientYamlTestSuiteIT.java | 4 +- .../s3/RepositoryS3ClientYamlTestSuiteIT.java | 4 +- .../store/StoreSmbClientYamlTestSuiteIT.java | 4 +- .../Backwards50ClientYamlTestSuiteIT.java | 5 +- .../MultiClusterSearchYamlTestSuiteIT.java | 9 +- .../UpgradeClusterClientYamlTestSuiteIT.java | 5 +- ...stIngestDisabledClientYamlTestSuiteIT.java | 4 +- ...ngestWithAllDepsClientYamlTestSuiteIT.java | 4 +- ...okeTestMultiNodeClientYamlTestSuiteIT.java | 6 +- ...SmokeTestPluginsClientYamlTestSuiteIT.java | 4 +- ...ndexWithPainlessClientYamlTestSuiteIT.java | 4 +- .../tribe/TribeClientYamlTestSuiteIT.java | 8 +- .../junit/listeners/ReproduceInfoPrinter.java | 3 +- .../rest/yaml/ESClientYamlSuiteTestCase.java | 155 +++++++--------- .../test/rest/yaml/FileUtils.java | 170 ------------------ .../restspec/ClientYamlSuiteRestSpec.java | 105 ++++++----- ...va => ESClientYamlSuiteTestCaseTests.java} | 44 ++--- 50 files changed, 186 insertions(+), 487 deletions(-) delete mode 100644 test/framework/src/main/java/org/elasticsearch/test/rest/yaml/FileUtils.java rename test/framework/src/test/java/org/elasticsearch/test/rest/yaml/{FileUtilsTests.java => ESClientYamlSuiteTestCaseTests.java} (63%) diff --git a/distribution/deb/src/test/java/org/elasticsearch/test/rest/DebClientYamlTestSuiteIT.java b/distribution/deb/src/test/java/org/elasticsearch/test/rest/DebClientYamlTestSuiteIT.java index 9a7978f69bf..0fcdecde068 100644 
--- a/distribution/deb/src/test/java/org/elasticsearch/test/rest/DebClientYamlTestSuiteIT.java +++ b/distribution/deb/src/test/java/org/elasticsearch/test/rest/DebClientYamlTestSuiteIT.java @@ -20,12 +20,9 @@ package org.elasticsearch.test.rest; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - /** Rest integration test. Runs against a cluster started by {@code gradle integTest} */ public class DebClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public DebClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { @@ -33,7 +30,7 @@ public class DebClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java index c3a038392cb..cd3a7782801 100644 --- a/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java +++ b/distribution/integ-test-zip/src/test/java/org/elasticsearch/test/rest/IntegTestZipClientYamlTestSuiteIT.java @@ -24,8 +24,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - /** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ public class IntegTestZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public IntegTestZipClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { @@ -33,7 +31,7 @@ public class IntegTestZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java index 86b49321808..388f5ee6517 100644 --- a/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java +++ b/distribution/rpm/src/test/java/org/elasticsearch/test/rest/RpmClientYamlTestSuiteIT.java @@ -24,8 +24,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - /** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ public class RpmClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public RpmClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { @@ -33,7 +31,7 @@ public class RpmClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/distribution/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java b/distribution/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java index a86e398830c..391d6fe688f 100644 --- a/distribution/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java +++ b/distribution/tar/src/test/java/org/elasticsearch/test/rest/TarClientYamlTestSuiteIT.java @@ -24,8 +24,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - /** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ public class TarClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public TarClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { @@ -33,7 +31,7 @@ public class TarClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/distribution/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java b/distribution/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java index 93c31c4ab65..dc08af2528b 100644 --- a/distribution/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java +++ b/distribution/zip/src/test/java/org/elasticsearch/test/rest/ZipClientYamlTestSuiteIT.java @@ -24,8 +24,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - /** Rest integration test. 
Runs against a cluster started by {@code gradle integTest} */ public class ZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public ZipClientYamlTestSuiteIT(ClientYamlTestCandidate testCandidate) { @@ -33,7 +31,7 @@ public class ZipClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 628c364d455..427a81bdd21 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -25,7 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; import java.util.List; public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -35,7 +34,7 @@ public class DocsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java index 6e11a56d2c6..be27e9050df 100644 --- a/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java +++ 
b/modules/aggs-matrix-stats/src/test/java/org/elasticsearch/search/aggregations/matrix/MatrixStatsClientYamlTestSuiteIT.java @@ -24,15 +24,13 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class MatrixStatsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public MatrixStatsClientYamlTestSuiteIT(@Name("yaml")ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java index 4027a75a2d1..6c10c16494b 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestCommonClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class IngestCommonClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public IngestCommonClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class IngestCommonClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff 
--git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/LangExpressionClientYamlTestSuiteIT.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/LangExpressionClientYamlTestSuiteIT.java index a20050b80ad..295c7a7f86f 100644 --- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/LangExpressionClientYamlTestSuiteIT.java +++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/LangExpressionClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class LangExpressionClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public LangExpressionClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class LangExpressionClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/LangMustacheClientYamlTestSuiteIT.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/LangMustacheClientYamlTestSuiteIT.java index 72eb9f2ad79..4e9c8c1b90c 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/LangMustacheClientYamlTestSuiteIT.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/LangMustacheClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import 
java.io.IOException; - public class LangMustacheClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public LangMustacheClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class LangMustacheClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LangPainlessClientYamlTestSuiteIT.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LangPainlessClientYamlTestSuiteIT.java index 9d055b74cc8..2d3f299bbcb 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LangPainlessClientYamlTestSuiteIT.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LangPainlessClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - /** Runs yaml rest tests */ public class LangPainlessClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -35,7 +33,7 @@ public class LangPainlessClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java index 28b3e6a2fdd..7012f7bd436 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java +++ 
b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorClientYamlTestSuiteIT.java @@ -21,18 +21,17 @@ package org.elasticsearch.percolator; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class PercolatorClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public PercolatorClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java index 0975cad96a4..3ec84e548a6 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ReindexClientYamlTestSuiteIT.java @@ -25,15 +25,13 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class ReindexClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public ReindexClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git 
a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java index 66fba622e60..238b14ce013 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java @@ -21,11 +21,10 @@ package org.elasticsearch.repositories.url; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public RepositoryURLClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -33,7 +32,7 @@ public class RepositoryURLClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4ClientYamlTestSuiteIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4ClientYamlTestSuiteIT.java index 2341f3905ef..d6628c69b69 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4ClientYamlTestSuiteIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4ClientYamlTestSuiteIT.java @@ -27,8 +27,6 @@ import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - //TODO: This is a *temporary* workaround to ensure a timeout does not mask other problems @TimeoutSuite(millis = 30 * TimeUnits.MINUTE) public class Netty4ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -38,7 +36,7 @@ public class Netty4ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuClientYamlTestSuiteIT.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuClientYamlTestSuiteIT.java index 04fc222063c..0d06f419724 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuClientYamlTestSuiteIT.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/analysis/IcuClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class IcuClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public IcuClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class IcuClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java index 
bc5360f94d1..b3083a2416a 100644 --- a/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java +++ b/plugins/analysis-kuromoji/src/test/java/org/elasticsearch/index/analysis/KuromojiClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class KuromojiClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public KuromojiClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class KuromojiClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java index 3d892bddee4..12736fa5fd8 100644 --- a/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java +++ b/plugins/analysis-phonetic/src/test/java/org/elasticsearch/index/analysis/PhoneticClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class PhoneticClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public PhoneticClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class PhoneticClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } 
@ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java index 24a581d9e26..de67e4c4d6c 100644 --- a/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java +++ b/plugins/analysis-smartcn/src/test/java/org/elasticsearch/index/analysis/SmartCNClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class SmartCNClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public SmartCNClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class SmartCNClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java index 371431e1c25..5b061c95775 100644 --- a/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java +++ b/plugins/analysis-stempel/src/test/java/org/elasticsearch/index/analysis/StempelClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import 
org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class StempelClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public StempelClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class StempelClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/analysis-ukrainian/src/test/java/org/elasticsearch/index/analysis/UkrainianClientYamlTestSuiteIT.java b/plugins/analysis-ukrainian/src/test/java/org/elasticsearch/index/analysis/UkrainianClientYamlTestSuiteIT.java index 50d935e5228..e03b1b1b4cb 100644 --- a/plugins/analysis-ukrainian/src/test/java/org/elasticsearch/index/analysis/UkrainianClientYamlTestSuiteIT.java +++ b/plugins/analysis-ukrainian/src/test/java/org/elasticsearch/index/analysis/UkrainianClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class UkrainianClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public UkrainianClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class UkrainianClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/DiscoveryAzureClassicClientYamlTestSuiteIT.java 
b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/DiscoveryAzureClassicClientYamlTestSuiteIT.java index 4f6a44ef235..4ad70a98c8c 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/DiscoveryAzureClassicClientYamlTestSuiteIT.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/DiscoveryAzureClassicClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class DiscoveryAzureClassicClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public DiscoveryAzureClassicClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class DiscoveryAzureClassicClientYamlTestSuiteIT extends ESClientYamlSuit } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/CloudAwsClientYamlTestSuiteIT.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/CloudAwsClientYamlTestSuiteIT.java index 168c169dfae..39bc468da89 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/CloudAwsClientYamlTestSuiteIT.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/CloudAwsClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class CloudAwsClientYamlTestSuiteIT extends 
ESClientYamlSuiteTestCase { public CloudAwsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class CloudAwsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java index dbc6212af9f..d2ac2095bdf 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedDiscoveryClientYamlTestSuiteIT.java @@ -21,12 +21,9 @@ package org.elasticsearch.discovery.file; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - /** * Integration tests to make sure the file-based discovery plugin works in a cluster. 
*/ @@ -37,7 +34,7 @@ public class FileBasedDiscoveryClientYamlTestSuiteIT extends ESClientYamlSuiteTe } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java index 67ef297609f..fe1e0cf5d77 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/DiscoveryGceClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class DiscoveryGceClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public DiscoveryGceClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class DiscoveryGceClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentClientYamlTestSuiteIT.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentClientYamlTestSuiteIT.java index 43e9cd394f0..679e950c328 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentClientYamlTestSuiteIT.java +++ 
b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/IngestAttachmentClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class IngestAttachmentClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public IngestAttachmentClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class IngestAttachmentClientYamlTestSuiteIT extends ESClientYamlSuiteTest } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java index 33f7c67e018..9d9bdb9c7d0 100644 --- a/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java +++ b/plugins/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/IngestGeoIpClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class IngestGeoIpClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public IngestGeoIpClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class IngestGeoIpClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception 
{ return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/IngestUserAgentClientYamlTestSuiteIT.java b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/IngestUserAgentClientYamlTestSuiteIT.java index cc70c1ef6ba..9587b4e514c 100644 --- a/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/IngestUserAgentClientYamlTestSuiteIT.java +++ b/plugins/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/IngestUserAgentClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class IngestUserAgentClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public IngestUserAgentClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class IngestUserAgentClientYamlTestSuiteIT extends ESClientYamlSuiteTestC } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleClientYamlTestSuiteIT.java b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleClientYamlTestSuiteIT.java index d3e84316b14..a43cca71826 100644 --- a/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleClientYamlTestSuiteIT.java +++ b/plugins/jvm-example/src/test/java/org/elasticsearch/plugin/example/JvmExampleClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class JvmExampleClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public JvmExampleClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class JvmExampleClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java index 9ca9f677b8d..5ef64cd5027 100644 --- a/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java +++ b/plugins/mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3/MapperMurmur3ClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class MapperMurmur3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public MapperMurmur3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class MapperMurmur3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeClientYamlTestSuiteIT.java 
b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeClientYamlTestSuiteIT.java index 3741c5626f6..0d4cb4372b2 100644 --- a/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeClientYamlTestSuiteIT.java +++ b/plugins/mapper-size/src/test/java/org/elasticsearch/index/mapper/size/MapperSizeClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class MapperSizeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public MapperSizeClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class MapperSizeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java index 5d3d051fb3b..551599b0918 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/RepositoryAzureClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public 
RepositoryAzureClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class RepositoryAzureClientYamlTestSuiteIT extends ESClientYamlSuiteTestC } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/RepositoryGcsClientYamlTestSuiteIT.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/RepositoryGcsClientYamlTestSuiteIT.java index cdd86bc48b9..5d87e671533 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/RepositoryGcsClientYamlTestSuiteIT.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/RepositoryGcsClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class RepositoryGcsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public RepositoryGcsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class RepositoryGcsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCas } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java index 6e66b0d49fe..7293449cc22 100644 --- 
a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/RepositoryHdfsClientYamlTestSuiteIT.java @@ -24,8 +24,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class RepositoryHdfsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public RepositoryHdfsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -33,7 +31,7 @@ public class RepositoryHdfsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java index 30056f67d2f..06f97e6ad17 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryS3ClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class RepositoryS3ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public RepositoryS3ClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class RepositoryS3ClientYamlTestSuiteIT extends 
ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/StoreSmbClientYamlTestSuiteIT.java b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/StoreSmbClientYamlTestSuiteIT.java index 0216083c95f..2a35bd1062d 100644 --- a/plugins/store-smb/src/test/java/org/elasticsearch/index/store/StoreSmbClientYamlTestSuiteIT.java +++ b/plugins/store-smb/src/test/java/org/elasticsearch/index/store/StoreSmbClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class StoreSmbClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public StoreSmbClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class StoreSmbClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/Backwards50ClientYamlTestSuiteIT.java b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/Backwards50ClientYamlTestSuiteIT.java index f6cb67f92fb..75492f6a2a0 100644 --- a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/Backwards50ClientYamlTestSuiteIT.java +++ b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/Backwards50ClientYamlTestSuiteIT.java @@ -21,13 +21,10 @@ package org.elasticsearch.backwards; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import 
com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - @TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // some of the windows test VMs are slow as hell public class Backwards50ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -36,7 +33,7 @@ public class Backwards50ClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/upgrades/MultiClusterSearchYamlTestSuiteIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/upgrades/MultiClusterSearchYamlTestSuiteIT.java index e8c3592e214..fe3a9098831 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/upgrades/MultiClusterSearchYamlTestSuiteIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/upgrades/MultiClusterSearchYamlTestSuiteIT.java @@ -19,14 +19,13 @@ package org.elasticsearch.upgrades; +import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - @TimeoutSuite(millis = 5 * TimeUnits.MINUTE) // to account for slow as hell VMs public class MultiClusterSearchYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -35,13 +34,13 @@ public class MultiClusterSearchYamlTestSuiteIT extends ESClientYamlSuiteTestCase return true; } - public MultiClusterSearchYamlTestSuiteIT(ClientYamlTestCandidate 
testCandidate) { + public MultiClusterSearchYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } -} +} diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 125566d385a..34c27454ec5 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -21,13 +21,10 @@ package org.elasticsearch.upgrades; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; - import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - @TimeoutSuite(millis = 5 * TimeUnits.MINUTE) // to account for slow as hell VMs public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -46,7 +43,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } } diff --git a/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java b/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java index 7f4c2c4a4d5..2cefd0f3335 100644 --- 
a/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java +++ b/qa/smoke-test-ingest-disabled/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestDisabledClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class SmokeTestIngestDisabledClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public SmokeTestIngestDisabledClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class SmokeTestIngestDisabledClientYamlTestSuiteIT extends ESClientYamlSu } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java index d1e1adabfd0..aca15564e90 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/test/java/org/elasticsearch/smoketest/SmokeTestIngestWithAllDepsClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class SmokeTestIngestWithAllDepsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public 
SmokeTestIngestWithAllDepsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class SmokeTestIngestWithAllDepsClientYamlTestSuiteIT extends ESClientYam } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } diff --git a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java index 69abd0b3cc2..ed33c5b05f3 100644 --- a/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-multinode/src/test/java/org/elasticsearch/smoketest/SmokeTestMultiNodeClientYamlTestSuiteIT.java @@ -21,14 +21,12 @@ package org.elasticsearch.smoketest; import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; -import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; +import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.util.TimeUnits; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - @TimeoutSuite(millis = 40 * TimeUnits.MINUTE) // some of the windows test VMs are slow as hell public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -37,7 +35,7 @@ public class SmokeTestMultiNodeClientYamlTestSuiteIT extends ESClientYamlSuiteTe } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git 
a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java index 2ae7be8fb1e..d1f9e6b7370 100644 --- a/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java +++ b/qa/smoke-test-plugins/src/test/java/org/elasticsearch/smoketest/SmokeTestPluginsClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class SmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public SmokeTestPluginsClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { @@ -34,7 +32,7 @@ public class SmokeTestPluginsClientYamlTestSuiteIT extends ESClientYamlSuiteTest } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java b/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java index 5366eaf4bd1..db1e62a6b15 100644 --- a/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java +++ b/qa/smoke-test-reindex-with-painless/src/test/java/org/elasticsearch/smoketest/SmokeTestReindexWithPainlessClientYamlTestSuiteIT.java @@ -25,15 +25,13 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import 
org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class SmokeTestReindexWithPainlessClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { public SmokeTestReindexWithPainlessClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return ESClientYamlSuiteTestCase.createParameters(); } } diff --git a/qa/smoke-test-tribe-node/src/test/java/org/elasticsearch/tribe/TribeClientYamlTestSuiteIT.java b/qa/smoke-test-tribe-node/src/test/java/org/elasticsearch/tribe/TribeClientYamlTestSuiteIT.java index 211043ed4b0..1a8e7867dd8 100644 --- a/qa/smoke-test-tribe-node/src/test/java/org/elasticsearch/tribe/TribeClientYamlTestSuiteIT.java +++ b/qa/smoke-test-tribe-node/src/test/java/org/elasticsearch/tribe/TribeClientYamlTestSuiteIT.java @@ -25,8 +25,6 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; -import java.io.IOException; - public class TribeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { // tribe nodes can not handle delete indices requests @@ -41,13 +39,13 @@ public class TribeClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { return true; } - public TribeClientYamlTestSuiteIT(@Name("yaml") final ClientYamlTestCandidate testCandidate) { + public TribeClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { super(testCandidate); } @ParametersFactory - public static Iterable parameters() throws IOException { + public static Iterable parameters() throws Exception { return createParameters(); } - } + diff --git a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java 
b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java index 3db17f4da0d..72fb5221ed7 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java +++ b/test/framework/src/main/java/org/elasticsearch/test/junit/listeners/ReproduceInfoPrinter.java @@ -37,7 +37,6 @@ import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_ITERATIONS; import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_PREFIX; import static com.carrotsearch.randomizedtesting.SysGlobals.SYSPROP_TESTMETHOD; import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TESTS_BLACKLIST; -import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TESTS_SPEC; import static org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase.REST_TESTS_SUITE; /** @@ -152,7 +151,7 @@ public class ReproduceInfoPrinter extends RunListener { } public ReproduceErrorMessageBuilder appendClientYamlSuiteProperties() { - return appendProperties(REST_TESTS_SUITE, REST_TESTS_SPEC, REST_TESTS_BLACKLIST); + return appendProperties(REST_TESTS_SUITE, REST_TESTS_BLACKLIST); } protected ReproduceErrorMessageBuilder appendProperties(String... 
properties) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 32436515fe8..17ecff32653 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -19,8 +19,18 @@ package org.elasticsearch.test.rest.yaml; -import com.carrotsearch.randomizedtesting.RandomizedTest; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; +import com.carrotsearch.randomizedtesting.RandomizedTest; import org.apache.http.HttpHost; import org.apache.lucene.util.IOUtils; import org.elasticsearch.Version; @@ -31,6 +41,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.FileSystemUtils; +import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; @@ -42,22 +53,6 @@ import org.elasticsearch.test.rest.yaml.section.ExecutableSection; import org.junit.AfterClass; import org.junit.Before; -import java.io.IOException; -import java.io.InputStream; -import java.net.URI; -import java.net.URISyntaxException; -import java.net.URL; -import java.nio.file.FileSystem; -import java.nio.file.FileSystems; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.StandardCopyOption; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; -import java.util.Map; 
-import java.util.Set; - /** * Runs a suite of yaml tests shared with all the official Elasticsearch clients against against an elasticsearch cluster. */ @@ -77,15 +72,9 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { * Property that allows to control whether spec validation is enabled or not (default true). */ private static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec"; - /** - * Property that allows to control where the REST spec files need to be loaded from - */ - public static final String REST_TESTS_SPEC = "tests.rest.spec"; - private static final String REST_LOAD_PACKAGED_TESTS = "tests.rest.load_packaged"; - - private static final String DEFAULT_TESTS_PATH = "/rest-api-spec/test"; - private static final String DEFAULT_SPEC_PATH = "/rest-api-spec/api"; + private static final String TESTS_PATH = "/rest-api-spec/test"; + private static final String SPEC_PATH = "/rest-api-spec/api"; /** * This separator pattern matches ',' except it is preceded by a '\'. @@ -109,20 +98,11 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { } @Before - public void initAndResetContext() throws IOException { + public void initAndResetContext() throws Exception { if (restTestExecutionContext == null) { assert adminExecutionContext == null; assert blacklistPathMatchers == null; - String[] specPaths = resolvePathsProperty(REST_TESTS_SPEC, DEFAULT_SPEC_PATH); - ClientYamlSuiteRestSpec restSpec = null; - FileSystem fileSystem = getFileSystem(); - // don't make a try-with, getFileSystem returns null - // ... 
and you can't close() the default filesystem - try { - restSpec = ClientYamlSuiteRestSpec.parseFrom(fileSystem, DEFAULT_SPEC_PATH, specPaths); - } finally { - IOUtils.close(fileSystem); - } + ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load(SPEC_PATH); validateSpec(restSpec); List hosts = getClusterHosts(); RestClient restClient = client(); @@ -188,41 +168,58 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { super.afterIfFailed(errors); } - public static Iterable createParameters() throws IOException { - List restTestCandidates = collectTestCandidates(); - List objects = new ArrayList<>(); - for (ClientYamlTestCandidate restTestCandidate : restTestCandidates) { - objects.add(new Object[]{restTestCandidate}); - } - return objects; - } - - private static List collectTestCandidates() throws IOException { - List testCandidates = new ArrayList<>(); - FileSystem fileSystem = getFileSystem(); - // don't make a try-with, getFileSystem returns null - // ... 
and you can't close() the default filesystem - try { - String[] paths = resolvePathsProperty(REST_TESTS_SUITE, DEFAULT_TESTS_PATH); - Map> yamlSuites = FileUtils.findYamlSuites(fileSystem, DEFAULT_TESTS_PATH, paths); - //yaml suites are grouped by directory (effectively by api) - for (String api : yamlSuites.keySet()) { - List yamlFiles = new ArrayList<>(yamlSuites.get(api)); - for (Path yamlFile : yamlFiles) { - ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(api, yamlFile); - for (ClientYamlTestSection testSection : restTestSuite.getTestSections()) { - testCandidates.add(new ClientYamlTestCandidate(restTestSuite, testSection)); - } + public static Iterable createParameters() throws Exception { + String[] paths = resolvePathsProperty(REST_TESTS_SUITE, ""); // default to all tests under the test root + List tests = new ArrayList<>(); + Map> yamlSuites = loadYamlSuites(paths); + // yaml suites are grouped by directory (effectively by api) + for (String api : yamlSuites.keySet()) { + List yamlFiles = new ArrayList<>(yamlSuites.get(api)); + for (Path yamlFile : yamlFiles) { + ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(api, yamlFile); + for (ClientYamlTestSection testSection : restTestSuite.getTestSections()) { + tests.add(new Object[]{ new ClientYamlTestCandidate(restTestSuite, testSection) }); } } - } finally { - IOUtils.close(fileSystem); } //sort the candidates so they will always be in the same order before being shuffled, for repeatability - Collections.sort(testCandidates, (o1, o2) -> o1.getTestPath().compareTo(o2.getTestPath())); + Collections.sort(tests, + (o1, o2) -> ((ClientYamlTestCandidate)o1[0]).getTestPath().compareTo(((ClientYamlTestCandidate)o2[0]).getTestPath())); + return tests; + } - return testCandidates; + /** Find all yaml suites that match the given list of paths from the root test path. */ + // pkg private for tests + static Map> loadYamlSuites(String... 
paths) throws Exception { + Map> files = new HashMap<>(); + Path root = PathUtils.get(ESClientYamlSuiteTestCase.class.getResource(TESTS_PATH).toURI()); + for (String strPath : paths) { + Path path = root.resolve(strPath); + if (Files.isDirectory(path)) { + Files.walk(path).forEach(file -> { + if (file.toString().endsWith(".yaml")) { + addYamlSuite(root, file, files); + } + }); + } else { + path = root.resolve(strPath + ".yaml"); + assert Files.exists(path); + addYamlSuite(root, path, files); + } + } + return files; + } + + /** Add a single suite file to the set of suites. */ + private static void addYamlSuite(Path root, Path file, Map> files) { + String groupName = root.relativize(file.getParent()).toString(); + Set filesSet = files.get(groupName); + if (filesSet == null) { + filesSet = new HashSet<>(); + files.put(groupName, filesSet); + } + filesSet.add(file); } private static String[] resolvePathsProperty(String propertyName, String defaultValue) { @@ -234,34 +231,6 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { } } - /** - * Returns a new FileSystem to read REST resources, or null if they - * are available from classpath. - */ - @SuppressForbidden(reason = "proper use of URL, hack around a JDK bug") - protected static FileSystem getFileSystem() throws IOException { - // REST suite handling is currently complicated, with lots of filtering and so on - // For now, to work embedded in a jar, return a ZipFileSystem over the jar contents. - URL codeLocation = FileUtils.class.getProtectionDomain().getCodeSource().getLocation(); - boolean loadPackaged = RandomizedTest.systemPropertyAsBoolean(REST_LOAD_PACKAGED_TESTS, true); - if (codeLocation.getFile().endsWith(".jar") && loadPackaged) { - try { - // hack around a bug in the zipfilesystem implementation before java 9, - // its checkWritable was incorrect and it won't work without write permissions. - // if we add the permission, it will open jars r/w, which is too scary! 
so copy to a safe r-w location. - Path tmp = Files.createTempFile(null, ".jar"); - try (InputStream in = FileSystemUtils.openFileURLStream(codeLocation)) { - Files.copy(in, tmp, StandardCopyOption.REPLACE_EXISTING); - } - return FileSystems.newFileSystem(new URI("jar:" + tmp.toUri()), Collections.emptyMap()); - } catch (URISyntaxException e) { - throw new IOException("couldn't open zipfilesystem: ", e); - } - } else { - return null; - } - } - protected ClientYamlTestExecutionContext getAdminExecutionContext() { return adminExecutionContext; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/FileUtils.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/FileUtils.java deleted file mode 100644 index 4519953819a..00000000000 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/FileUtils.java +++ /dev/null @@ -1,170 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.test.rest.yaml; - -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.PathUtils; - -import java.io.IOException; -import java.net.URL; -import java.nio.file.DirectoryStream; -import java.nio.file.FileSystem; -import java.nio.file.FileVisitResult; -import java.nio.file.Files; -import java.nio.file.NoSuchFileException; -import java.nio.file.NotDirectoryException; -import java.nio.file.Path; -import java.nio.file.SimpleFileVisitor; -import java.nio.file.attribute.BasicFileAttributes; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Set; - -public final class FileUtils { - - private static final String YAML_SUFFIX = ".yaml"; - private static final String JSON_SUFFIX = ".json"; - - private FileUtils() { - - } - - /** - * Returns the json files found within the directory provided as argument. - * Files are looked up in the classpath, or optionally from {@code fileSystem} if its not null. - */ - public static Set findJsonSpec(FileSystem fileSystem, String optionalPathPrefix, String path) throws IOException { - Path dir = resolveFile(fileSystem, optionalPathPrefix, path, null); - - if (!Files.isDirectory(dir)) { - throw new NotDirectoryException(path); - } - - Set jsonFiles = new HashSet<>(); - try (DirectoryStream stream = Files.newDirectoryStream(dir)) { - for (Path item : stream) { - if (item.toString().endsWith(JSON_SUFFIX)) { - jsonFiles.add(item); - } - } - } - - if (jsonFiles.isEmpty()) { - throw new NoSuchFileException(path, null, "no json files found"); - } - - return jsonFiles; - } - - /** - * Returns the yaml files found within the paths provided. - * Each input path can either be a single file (the .yaml suffix is optional) or a directory. - * Each path is looked up in the classpath, or optionally from {@code fileSystem} if its not null. - */ - public static Map> findYamlSuites(FileSystem fileSystem, String optionalPathPrefix, final String... 
paths) - throws IOException { - Map> yamlSuites = new HashMap<>(); - for (String path : paths) { - collectFiles(resolveFile(fileSystem, optionalPathPrefix, path, YAML_SUFFIX), YAML_SUFFIX, yamlSuites); - } - return yamlSuites; - } - - private static Path resolveFile(FileSystem fileSystem, String optionalPathPrefix, String path, String optionalFileSuffix) - throws IOException { - if (fileSystem != null) { - Path file = findFile(fileSystem, path, optionalFileSuffix); - if (!lenientExists(file)) { - // try with optional prefix: /rest-api-spec/test (or /rest-api-spec/api) is optional - String newPath = optionalPathPrefix + "/" + path; - file = findFile(fileSystem, newPath, optionalFileSuffix); - if (!lenientExists(file)) { - throw new NoSuchFileException("path prefix: " + optionalPathPrefix + ", path: " + path + ", file suffix: " - + optionalFileSuffix); - } - } - return file; - } else { - //try within classpath - URL resource = findResource(path, optionalFileSuffix); - if (resource == null) { - //try within classpath with optional prefix: /rest-api-spec/test (or /rest-api-spec/api) is optional - String newPath = optionalPathPrefix + "/" + path; - resource = findResource(newPath, optionalFileSuffix); - if (resource == null) { - throw new NoSuchFileException(path); - } - } - try { - return PathUtils.get(resource.toURI()); - } catch (Exception e) { - // some filesystems have REALLY useless exceptions here. - // ZipFileSystem I am looking at you. 
- throw new RuntimeException("couldn't retrieve URL: " + resource, e); - } - } - } - - private static URL findResource(String path, String optionalFileSuffix) { - URL resource = FileUtils.class.getResource(path); - if (resource == null) { - //if not found we append the file suffix to the path (as it is optional) - if (Strings.hasLength(optionalFileSuffix) && !path.endsWith(optionalFileSuffix)) { - resource = FileUtils.class.getResource(path + optionalFileSuffix); - } - } - return resource; - } - - // used because this test "guesses" from like 4 different places from the filesystem! - private static boolean lenientExists(Path file) { - boolean exists = false; - try { - exists = Files.exists(file); - } catch (SecurityException ok) {} - return exists; - } - - private static Path findFile(FileSystem fileSystem, String path, String optionalFileSuffix) { - Path file = fileSystem.getPath(path); - if (!lenientExists(file)) { - file = fileSystem.getPath(path + optionalFileSuffix); - } - return file; - } - - private static void collectFiles(final Path dir, final String fileSuffix, final Map> files) throws IOException { - Files.walkFileTree(dir, new SimpleFileVisitor() { - @Override - public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { - if (file.toString().endsWith(fileSuffix)) { - String groupName = dir.relativize(file.getParent()).toString(); - Set filesSet = files.get(groupName); - if (filesSet == null) { - filesSet = new HashSet<>(); - files.put(groupName, filesSet); - } - filesSet.add(file); - } - return FileVisitResult.CONTINUE; - } - }); - } -} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java index 15f2f7e3016..dd800f5c9dd 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java +++ 
b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/restspec/ClientYamlSuiteRestSpec.java @@ -18,14 +18,9 @@ */ package org.elasticsearch.test.rest.yaml.restspec; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; -import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.rest.yaml.FileUtils; - import java.io.IOException; import java.io.InputStream; -import java.nio.file.FileSystem; +import java.io.UncheckedIOException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Collection; @@ -33,6 +28,12 @@ import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; +import java.util.stream.Stream; + +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; /** * Holds the specification used to turn {@code do} actions in the YAML suite into REST api calls. @@ -41,8 +42,7 @@ public class ClientYamlSuiteRestSpec { private final Set globalParameters = new HashSet<>(); private final Map restApiMap = new HashMap<>(); - private ClientYamlSuiteRestSpec() { - } + private ClientYamlSuiteRestSpec() {} private void addApi(ClientYamlSuiteRestApi restApi) { ClientYamlSuiteRestApi previous = restApiMap.putIfAbsent(restApi.getName(), restApi); @@ -78,51 +78,58 @@ public class ClientYamlSuiteRestSpec { /** * Parses the complete set of REST spec available under the provided directories */ - public static ClientYamlSuiteRestSpec parseFrom(FileSystem fileSystem, String optionalPathPrefix, String... 
paths) throws IOException { + public static ClientYamlSuiteRestSpec load(String classpathPrefix) throws Exception { + Path dir = PathUtils.get(ClientYamlSuiteRestSpec.class.getResource(classpathPrefix).toURI()); ClientYamlSuiteRestSpec restSpec = new ClientYamlSuiteRestSpec(); ClientYamlSuiteRestApiParser restApiParser = new ClientYamlSuiteRestApiParser(); - for (String path : paths) { - for (Path jsonFile : FileUtils.findJsonSpec(fileSystem, optionalPathPrefix, path)) { - try (InputStream stream = Files.newInputStream(jsonFile)) { - String filename = jsonFile.getFileName().toString(); - try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, stream)) { - if (filename.equals("_common.json")) { - String currentFieldName = null; - while (parser.nextToken() != XContentParser.Token.END_OBJECT) { - if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (parser.currentToken() == XContentParser.Token.START_OBJECT - && "params".equals(currentFieldName)) { - while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { - String param = parser.currentName(); - if (restSpec.globalParameters.contains(param)) { - throw new IllegalArgumentException("Found duplicate global param [" + param + "]"); - } - restSpec.globalParameters.add(param); - parser.nextToken(); - if (parser.currentToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Expected params field in rest api definition to " + - "contain an object"); - } - parser.skipChildren(); - } - } - } - } else { - ClientYamlSuiteRestApi restApi = restApiParser.parse(jsonFile.toString(), parser); - String expectedApiName = filename.substring(0, filename.lastIndexOf('.')); - if (restApi.getName().equals(expectedApiName) == false) { - throw new IllegalArgumentException("found api [" + restApi.getName() + "] in [" + jsonFile.toString() + - "]. 
" + "Each api is expected to have the same name as the file that defines it."); - } - restSpec.addApi(restApi); - } - } - } catch (Exception ex) { - throw new IOException("Can't parse rest spec file: [" + jsonFile + "]", ex); + try (Stream stream = Files.walk(dir)) { + stream.forEach(item -> { + if (item.toString().endsWith(".json")) { + parseSpecFile(restApiParser, item, restSpec); } - } + }); } return restSpec; } + + private static void parseSpecFile(ClientYamlSuiteRestApiParser restApiParser, Path jsonFile, ClientYamlSuiteRestSpec restSpec) { + try (InputStream stream = Files.newInputStream(jsonFile)) { + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, stream)) { + String filename = jsonFile.getFileName().toString(); + if (filename.equals("_common.json")) { + String currentFieldName = null; + while (parser.nextToken() != XContentParser.Token.END_OBJECT) { + if (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (parser.currentToken() == XContentParser.Token.START_OBJECT + && "params".equals(currentFieldName)) { + while (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + String param = parser.currentName(); + if (restSpec.globalParameters.contains(param)) { + throw new IllegalArgumentException("Found duplicate global param [" + param + "]"); + } + restSpec.globalParameters.add(param); + parser.nextToken(); + if (parser.currentToken() != XContentParser.Token.START_OBJECT) { + throw new IllegalArgumentException("Expected params field in rest api definition to " + + "contain an object"); + } + parser.skipChildren(); + } + } + } + } else { + ClientYamlSuiteRestApi restApi = restApiParser.parse(jsonFile.toString(), parser); + String expectedApiName = filename.substring(0, filename.lastIndexOf('.')); + if (restApi.getName().equals(expectedApiName) == false) { + throw new IllegalArgumentException("found api [" + restApi.getName() + "] in [" + 
jsonFile.toString() + "]. " + + "Each api is expected to have the same name as the file that defines it."); + } + restSpec.addApi(restApi); + } + } + } catch (IOException ex) { + throw new UncheckedIOException("Can't parse rest spec file: [" + jsonFile + "]", ex); + } + } } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/FileUtilsTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCaseTests.java similarity index 63% rename from test/framework/src/test/java/org/elasticsearch/test/rest/yaml/FileUtilsTests.java rename to test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCaseTests.java index 457152381ba..ee76ad351a6 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/FileUtilsTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCaseTests.java @@ -18,44 +18,43 @@ */ package org.elasticsearch.test.rest.yaml; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.rest.yaml.FileUtils; - import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; import java.util.Set; +import org.elasticsearch.test.ESTestCase; + import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.notNullValue; import static org.hamcrest.Matchers.greaterThan; -public class FileUtilsTests extends ESTestCase { - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/20240") - public void testLoadSingleYamlSuite() throws Exception { - Map> yamlSuites = FileUtils.findYamlSuites(null, "/rest-api-spec/test", "/rest-api-spec/test/suite1/10_basic"); - assertSingleFile(yamlSuites, "suite1", "10_basic.yaml"); +public class ESClientYamlSuiteTestCaseTests extends ESTestCase { - //the path prefix is optional - yamlSuites = FileUtils.findYamlSuites(null, "/rest-api-spec/test", "suite1/10_basic.yaml"); + public void testLoadAllYamlSuites() throws Exception { + Map> 
yamlSuites = ESClientYamlSuiteTestCase.loadYamlSuites(""); + assertEquals(2, yamlSuites.size()); + } + + public void testLoadSingleYamlSuite() throws Exception { + Map> yamlSuites = ESClientYamlSuiteTestCase.loadYamlSuites("suite1/10_basic"); assertSingleFile(yamlSuites, "suite1", "10_basic.yaml"); //extension .yaml is optional - yamlSuites = FileUtils.findYamlSuites(null, "/rest-api-spec/test", "suite1/10_basic"); + yamlSuites = ESClientYamlSuiteTestCase.loadYamlSuites("suite1/10_basic"); assertSingleFile(yamlSuites, "suite1", "10_basic.yaml"); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/20240") public void testLoadMultipleYamlSuites() throws Exception { //single directory - Map> yamlSuites = FileUtils.findYamlSuites(null, "/rest-api-spec/test", "suite1"); + Map> yamlSuites = ESClientYamlSuiteTestCase.loadYamlSuites("suite1"); assertThat(yamlSuites, notNullValue()); assertThat(yamlSuites.size(), equalTo(1)); assertThat(yamlSuites.containsKey("suite1"), equalTo(true)); assertThat(yamlSuites.get("suite1").size(), greaterThan(1)); //multiple directories - yamlSuites = FileUtils.findYamlSuites(null, "/rest-api-spec/test", "suite1", "suite2"); + yamlSuites = ESClientYamlSuiteTestCase.loadYamlSuites("suite1", "suite2"); assertThat(yamlSuites, notNullValue()); assertThat(yamlSuites.size(), equalTo(2)); assertThat(yamlSuites.containsKey("suite1"), equalTo(true)); @@ -64,7 +63,7 @@ public class FileUtilsTests extends ESTestCase { assertEquals(2, yamlSuites.get("suite2").size()); //multiple paths, which can be both directories or yaml test suites (with optional file extension) - yamlSuites = FileUtils.findYamlSuites(null, "/rest-api-spec/test", "suite2/10_basic", "suite1"); + yamlSuites = ESClientYamlSuiteTestCase.loadYamlSuites("suite2/10_basic", "suite1"); assertThat(yamlSuites, notNullValue()); assertThat(yamlSuites.size(), equalTo(2)); assertThat(yamlSuites.containsKey("suite2"), equalTo(true)); @@ -77,21 +76,6 @@ public class FileUtilsTests 
extends ESTestCase { Path dir = createTempDir(); Path file = dir.resolve("test_loading.yaml"); Files.createFile(file); - - //load from directory outside of the classpath - yamlSuites = FileUtils.findYamlSuites(dir.getFileSystem(), "/rest-api-spec/test", dir.toAbsolutePath().toString()); - assertThat(yamlSuites, notNullValue()); - assertThat(yamlSuites.size(), equalTo(1)); - assertThat(yamlSuites.containsKey(dir.getFileName().toString()), equalTo(true)); - assertSingleFile(yamlSuites.get(dir.getFileName().toString()), dir.getFileName().toString(), file.getFileName().toString()); - - //load from external file (optional extension) - yamlSuites = FileUtils.findYamlSuites(dir.getFileSystem(), "/rest-api-spec/test", - dir.resolve("test_loading").toAbsolutePath().toString()); - assertThat(yamlSuites, notNullValue()); - assertThat(yamlSuites.size(), equalTo(1)); - assertThat(yamlSuites.containsKey(dir.getFileName().toString()), equalTo(true)); - assertSingleFile(yamlSuites.get(dir.getFileName().toString()), dir.getFileName().toString(), file.getFileName().toString()); } private static void assertSingleFile(Map> yamlSuites, String dirName, String fileName) { From 8f666a74f8aae6c86062dbce37dc0f5b1d4228f6 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 18 Apr 2017 17:26:40 -0700 Subject: [PATCH 09/19] Test: Fix plugin integ test to depend on bundling plugin (#24170) This was broken in the recent refactoring to add dependsOn directly to cluster configuration. 
--- .../elasticsearch/gradle/plugin/PluginBuildPlugin.groovy | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index a5845566215..2e11fdc2681 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -63,13 +63,13 @@ public class PluginBuildPlugin extends BuildPlugin { project.ext.set("nebulaPublish.maven.jar", false) } - project.integTest.dependsOn(project.bundlePlugin) + project.integTestCluster.dependsOn(project.bundlePlugin) project.tasks.run.dependsOn(project.bundlePlugin) if (isModule) { - project.integTest.clusterConfig.module(project) + project.integTestCluster.module(project) project.tasks.run.clusterConfig.module(project) } else { - project.integTest.clusterConfig.plugin(project.path) + project.integTestCluster.plugin(project.path) project.tasks.run.clusterConfig.plugin(project.path) addZipPomGeneration(project) addNoticeGeneration(project) From db0a5e4263400cf612d3c1e61db6041c1bdf9291 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 18 Apr 2017 22:52:59 -0400 Subject: [PATCH 10/19] Painless: more testing for script_stack (#24168) `script_stack` is super useful when debugging Painless scripts because it skips all the "weird" stuff involved that obfuscates where the actual error is. It skips Painless's internals and call site bootstrapping. It works fine, but it didn't have many tests. This converts a test that we had for line numbers into a test for the `script_stack`. The line numbers test was an indirect test for `script_stack`. 
--- .../painless/CompilerSettings.java | 3 +- .../painless/ArrayLikeObjectTestCase.java | 4 +- .../painless/BasicExpressionTests.java | 4 +- .../painless/ImplementInterfacesTests.java | 22 ++-- .../elasticsearch/painless/LambdaTests.java | 4 +- .../elasticsearch/painless/RegexTests.java | 7 +- .../painless/ScriptTestCase.java | 38 ++++++ .../elasticsearch/painless/StringTests.java | 4 +- .../painless/WhenThingsGoWrongTests.java | 117 ++++++++++++------ 9 files changed, 142 insertions(+), 61 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java index 378cca7f58f..e723081e36c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/CompilerSettings.java @@ -43,7 +43,8 @@ public final class CompilerSettings { public static final String PICKY = "picky"; /** - * For testing: do not use. + * Hack to set the initial "depth" for the {@link DefBootstrap.PIC} and {@link DefBootstrap.MIC}. Only used for testing: do not + * overwrite. 
*/ public static final String INITIAL_CALL_SITE_DEPTH = "initialCallSiteDepth"; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java index 69b40f141e2..5fc41c8c630 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ArrayLikeObjectTestCase.java @@ -77,8 +77,8 @@ public abstract class ArrayLikeObjectTestCase extends ScriptTestCase { } private void expectOutOfBounds(int index, String script, Object val) { - IndexOutOfBoundsException e = expectScriptThrows(IndexOutOfBoundsException.class, - () -> exec(script, singletonMap("val", val), true)); + IndexOutOfBoundsException e = expectScriptThrows(IndexOutOfBoundsException.class, () -> + exec(script, singletonMap("val", val), true)); try { assertThat(e.getMessage(), outOfBoundsExceptionMessageMatcher(index, 5)); } catch (AssertionError ae) { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java index ef2ddad5452..97e1f01fdfc 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BasicExpressionTests.java @@ -186,7 +186,7 @@ public class BasicExpressionTests extends ScriptTestCase { assertNull( exec("def a = null; return a?.toString()")); assertEquals("foo", exec("def a = 'foo'; return a?.toString()")); // Call with primitive result - assertMustBeNullable( "String a = null; return a?.length()"); + assertMustBeNullable( "String a = null; return a?.length()"); assertMustBeNullable( "String a = 'foo'; return a?.length()"); assertNull( exec("def a = null; return a?.length()")); assertEquals(3, exec("def a = 'foo'; 
return a?.length()")); @@ -265,7 +265,7 @@ public class BasicExpressionTests extends ScriptTestCase { } private void assertMustBeNullable(String script) { - Exception e = expectScriptThrows(IllegalArgumentException.class , () -> exec(script)); + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> exec(script)); assertEquals("Result of null safe operator must be nullable", e.getMessage()); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java index fe95e8c8c23..c3861add319 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ImplementInterfacesTests.java @@ -325,7 +325,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object execute(String foo); } public void testNoArgumentsConstant() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(NoArgumentsConstant.class, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith("Painless needs a constant [String[] ARGUMENTS] on all interfaces it implements with the " + "names of the method arguments but [" + NoArgumentsConstant.class.getName() + "] doesn't have one.")); @@ -336,7 +336,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object execute(String foo); } public void testWrongArgumentsConstant() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(WrongArgumentsConstant.class, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith("Painless needs a constant [String[] ARGUMENTS] on all interfaces it implements with the " + "names of 
the method arguments but [" + WrongArgumentsConstant.class.getName() + "] doesn't have one.")); @@ -347,7 +347,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object execute(String foo); } public void testWrongLengthOfArgumentConstant() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(WrongLengthOfArgumentConstant.class, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith("[" + WrongLengthOfArgumentConstant.class.getName() + "#ARGUMENTS] has length [2] but [" + WrongLengthOfArgumentConstant.class.getName() + "#execute] takes [1] argument.")); @@ -358,7 +358,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object execute(UnknownArgType foo); } public void testUnknownArgType() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(UnknownArgType.class, null, "1", emptyMap())); assertEquals("[foo] is of unknown type [" + UnknownArgType.class.getName() + ". 
Painless interfaces can only accept arguments " + "that are of whitelisted types.", e.getMessage()); @@ -369,7 +369,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { UnknownReturnType execute(String foo); } public void testUnknownReturnType() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(UnknownReturnType.class, null, "1", emptyMap())); assertEquals("Painless can only implement execute methods returning a whitelisted type but [" + UnknownReturnType.class.getName() + "#execute] returns [" + UnknownReturnType.class.getName() + "] which isn't whitelisted.", e.getMessage()); @@ -380,7 +380,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object execute(UnknownArgTypeInArray[] foo); } public void testUnknownArgTypeInArray() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(UnknownArgTypeInArray.class, null, "1", emptyMap())); assertEquals("[foo] is of unknown type [" + UnknownArgTypeInArray.class.getName() + ". 
Painless interfaces can only accept " + "arguments that are of whitelisted types.", e.getMessage()); @@ -391,7 +391,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object execute(boolean foo); } public void testTwoExecuteMethods() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(TwoExecuteMethods.class, null, "null", emptyMap())); assertEquals("Painless can only implement interfaces that have a single method named [execute] but [" + TwoExecuteMethods.class.getName() + "] has more than one.", e.getMessage()); @@ -401,7 +401,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object something(); } public void testBadMethod() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(BadMethod.class, null, "null", emptyMap())); assertEquals("Painless can only implement methods named [execute] and [uses$argName] but [" + BadMethod.class.getName() + "] contains a method named [something]", e.getMessage()); @@ -413,7 +413,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { Object uses$foo(); } public void testBadUsesReturn() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(BadUsesReturn.class, null, "null", emptyMap())); assertEquals("Painless can only implement uses$ methods that return boolean but [" + BadUsesReturn.class.getName() + "#uses$foo] returns [java.lang.Object].", e.getMessage()); @@ -425,7 +425,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { boolean uses$bar(boolean foo); } public void testBadUsesParameter() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = 
expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(BadUsesParameter.class, null, "null", emptyMap())); assertEquals("Painless can only implement uses$ methods that do not take parameters but [" + BadUsesParameter.class.getName() + "#uses$bar] does.", e.getMessage()); @@ -437,7 +437,7 @@ public class ImplementInterfacesTests extends ScriptTestCase { boolean uses$baz(); } public void testBadUsesName() { - Exception e = expectScriptThrows(IllegalArgumentException.class, () -> + Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(BadUsesName.class, null, "null", emptyMap())); assertEquals("Painless can only implement uses$ methods that match a parameter name but [" + BadUsesName.class.getName() + "#uses$baz] doesn't match any of [foo, bar].", e.getMessage()); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java index bce70a080db..bcb92a527d9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LambdaTests.java @@ -204,7 +204,7 @@ public class LambdaTests extends ScriptTestCase { public void testWrongArity() { assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9); - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { + IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> { exec("Optional.empty().orElseGet(x -> x);"); }); assertTrue(expected.getMessage().contains("Incorrect number of parameters")); @@ -220,7 +220,7 @@ public class LambdaTests extends ScriptTestCase { public void testWrongArityNotEnough() { assumeFalse("JDK is JDK 9", Constants.JRE_IS_MINIMUM_JAVA9); - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { + 
IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> { exec("List l = new ArrayList(); l.add(1); l.add(1); " + "return l.stream().mapToInt(() -> 5).sum();"); }); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java index 83a592b3f26..92ff9ef3c93 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/RegexTests.java @@ -26,10 +26,8 @@ import java.nio.CharBuffer; import java.util.Arrays; import java.util.HashSet; import java.util.regex.Pattern; -import java.util.regex.PatternSyntaxException; import static java.util.Collections.singletonMap; -import static org.hamcrest.Matchers.containsString; public class RegexTests extends ScriptTestCase { @Override @@ -264,8 +262,9 @@ public class RegexTests extends ScriptTestCase { assertEquals("Error compiling regex: Illegal Unicode escape sequence", e.getCause().getMessage()); // And make sure the location of the error points to the offset inside the pattern - assertEquals("/\\ujjjj/", e.getScriptStack().get(0)); - assertEquals(" ^---- HERE", e.getScriptStack().get(1)); + assertScriptStack(e, + "/\\ujjjj/", + " ^---- HERE"); } public void testRegexAgainstNumber() { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index 74c6c9a5628..1ab5aa14508 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -35,6 +35,8 @@ import org.junit.Before; import java.util.HashMap; import java.util.Map; +import static org.hamcrest.Matchers.hasSize; + /** * Base test case for scripting unit tests. *

@@ -114,10 +116,29 @@ public abstract class ScriptTestCase extends ESTestCase { /** Checks a specific exception class is thrown (boxed inside ScriptException) and returns it. */ public static T expectScriptThrows(Class expectedType, ThrowingRunnable runnable) { + return expectScriptThrows(expectedType, true, runnable); + } + + /** Checks a specific exception class is thrown (boxed inside ScriptException) and returns it. */ + public static T expectScriptThrows(Class expectedType, boolean shouldHaveScriptStack, + ThrowingRunnable runnable) { try { runnable.run(); } catch (Throwable e) { if (e instanceof ScriptException) { + boolean hasEmptyScriptStack = ((ScriptException) e).getScriptStack().isEmpty(); + if (shouldHaveScriptStack && hasEmptyScriptStack) { + if (0 != e.getCause().getStackTrace().length) { + // Without -XX:-OmitStackTraceInFastThrow the jvm can eat the stack trace which causes us to ignore script_stack + AssertionFailedError assertion = new AssertionFailedError("ScriptException should have a scriptStack"); + assertion.initCause(e); + throw assertion; + } + } else if (false == shouldHaveScriptStack && false == hasEmptyScriptStack) { + AssertionFailedError assertion = new AssertionFailedError("ScriptException shouldn't have a scriptStack"); + assertion.initCause(e); + throw assertion; + } e = e.getCause(); if (expectedType.isInstance(e)) { return expectedType.cast(e); @@ -134,4 +155,21 @@ public abstract class ScriptTestCase extends ESTestCase { } throw new AssertionFailedError("Expected exception " + expectedType.getSimpleName()); } + + /** + * Asserts that the script_stack looks right. + */ + public static void assertScriptStack(ScriptException e, String... 
stack) { + // This particular incantation of assertions makes the error messages more useful + try { + assertThat(e.getScriptStack(), hasSize(stack.length)); + for (int i = 0; i < stack.length; i++) { + assertEquals(stack[i], e.getScriptStack().get(i)); + } + } catch (AssertionError assertion) { + assertion.initCause(e); + throw assertion; + } + } + } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java index da4558a693a..2888eca3db4 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/StringTests.java @@ -165,12 +165,12 @@ public class StringTests extends ScriptTestCase { assertEquals('c', exec("String s = \"c\"; (char)s")); assertEquals('c', exec("String s = 'c'; (char)s")); - ClassCastException expected = expectScriptThrows(ClassCastException.class, () -> { + ClassCastException expected = expectScriptThrows(ClassCastException.class, false, () -> { assertEquals("cc", exec("return (String)(char)\"cc\"")); }); assertTrue(expected.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); - expected = expectScriptThrows(ClassCastException.class, () -> { + expected = expectScriptThrows(ClassCastException.class, false, () -> { assertEquals("cc", exec("return (String)(char)'cc'")); }); assertTrue(expected.getMessage().contains("Cannot cast [String] with length greater than one to [char].")); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java index aaa337ae821..d60da7b795f 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/WhenThingsGoWrongTests.java 
@@ -19,7 +19,10 @@ package org.elasticsearch.painless; +import junit.framework.AssertionFailedError; + import org.apache.lucene.util.Constants; +import org.elasticsearch.script.ScriptException; import java.lang.invoke.WrongMethodTypeException; import java.util.Arrays; @@ -27,52 +30,93 @@ import java.util.Collections; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.instanceOf; public class WhenThingsGoWrongTests extends ScriptTestCase { public void testNullPointer() { expectScriptThrows(NullPointerException.class, () -> { exec("int x = params['missing']; return x;"); }); + expectScriptThrows(NullPointerException.class, () -> { + exec("Double.parseDouble(params['missing'])"); + }); } - /** test "line numbers" in the bytecode, which are really 1-based offsets */ - public void testLineNumbers() { - // trigger NPE at line 1 of the script - NullPointerException exception = expectScriptThrows(NullPointerException.class, () -> { - exec("String x = null; boolean y = x.isEmpty();\n" + - "return y;"); - }); - // null deref at x.isEmpty(), the '.' is offset 30 (+1) - assertEquals(30 + 1, exception.getStackTrace()[0].getLineNumber()); + /** + * Test that the scriptStack looks good. By implication this tests that we build proper "line numbers" in stack trace. These line + * numbers are really 1 based character numbers. + */ + public void testScriptStack() { + for (String type : new String[] {"String", "def "}) { + // trigger NPE at line 1 of the script + ScriptException exception = expectThrows(ScriptException.class, () -> { + exec(type + " x = null; boolean y = x.isEmpty();\n" + + "return y;"); + }); + // null deref at x.isEmpty(), the '.' 
is offset 30 + assertScriptElementColumn(30, exception); + assertScriptStack(exception, + "y = x.isEmpty();\n", + " ^---- HERE"); + assertThat(exception.getCause(), instanceOf(NullPointerException.class)); - // trigger NPE at line 2 of the script - exception = expectScriptThrows(NullPointerException.class, () -> { - exec("String x = null;\n" + - "return x.isEmpty();"); - }); - // null deref at x.isEmpty(), the '.' is offset 25 (+1) - assertEquals(25 + 1, exception.getStackTrace()[0].getLineNumber()); + // trigger NPE at line 2 of the script + exception = expectThrows(ScriptException.class, () -> { + exec(type + " x = null;\n" + + "return x.isEmpty();"); + }); + // null deref at x.isEmpty(), the '.' is offset 25 + assertScriptElementColumn(25, exception); + assertScriptStack(exception, + "return x.isEmpty();", + " ^---- HERE"); + assertThat(exception.getCause(), instanceOf(NullPointerException.class)); - // trigger NPE at line 3 of the script - exception = expectScriptThrows(NullPointerException.class, () -> { - exec("String x = null;\n" + - "String y = x;\n" + - "return y.isEmpty();"); - }); - // null deref at y.isEmpty(), the '.' is offset 39 (+1) - assertEquals(39 + 1, exception.getStackTrace()[0].getLineNumber()); + // trigger NPE at line 3 of the script + exception = expectThrows(ScriptException.class, () -> { + exec(type + " x = null;\n" + + type + " y = x;\n" + + "return y.isEmpty();"); + }); + // null deref at y.isEmpty(), the '.' is offset 39 + assertScriptElementColumn(39, exception); + assertScriptStack(exception, + "return y.isEmpty();", + " ^---- HERE"); + assertThat(exception.getCause(), instanceOf(NullPointerException.class)); - // trigger NPE at line 4 in script (inside conditional) - exception = expectScriptThrows(NullPointerException.class, () -> { - exec("String x = null;\n" + - "boolean y = false;\n" + - "if (!y) {\n" + - " y = x.isEmpty();\n" + - "}\n" + - "return y;"); - }); - // null deref at x.isEmpty(), the '.' 
is offset 53 (+1) - assertEquals(53 + 1, exception.getStackTrace()[0].getLineNumber()); + // trigger NPE at line 4 in script (inside conditional) + exception = expectThrows(ScriptException.class, () -> { + exec(type + " x = null;\n" + + "boolean y = false;\n" + + "if (!y) {\n" + + " y = x.isEmpty();\n" + + "}\n" + + "return y;"); + }); + // null deref at x.isEmpty(), the '.' is offset 53 + assertScriptElementColumn(53, exception); + assertScriptStack(exception, + "y = x.isEmpty();\n}\n", + " ^---- HERE"); + assertThat(exception.getCause(), instanceOf(NullPointerException.class)); + } + } + + private void assertScriptElementColumn(int expectedColumn, ScriptException exception) { + StackTraceElement[] stackTrace = exception.getCause().getStackTrace(); + for (int i = 0; i < stackTrace.length; i++) { + if (WriterConstants.CLASS_NAME.equals(stackTrace[i].getClassName())) { + if (expectedColumn + 1 != stackTrace[i].getLineNumber()) { + AssertionFailedError assertion = new AssertionFailedError("Expected column to be [" + expectedColumn + "] but was [" + + stackTrace[i].getLineNumber() + "]"); + assertion.initCause(exception); + throw assertion; + } + return; + } + } + fail("didn't find script stack element"); } public void testInvalidShift() { @@ -161,7 +205,7 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { final char[] tooManyChars = new char[Compiler.MAXIMUM_SOURCE_LENGTH + 1]; Arrays.fill(tooManyChars, '0'); - IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, () -> { + IllegalArgumentException expected = expectScriptThrows(IllegalArgumentException.class, false, () -> { exec(new String(tooManyChars)); }); assertTrue(expected.getMessage().contains("Scripts may be no longer than")); @@ -282,5 +326,4 @@ public class WhenThingsGoWrongTests extends ScriptTestCase { e = expectScriptThrows(IllegalArgumentException.class, () -> exec("'cat", false)); assertEquals("unexpected character ['cat].", e.getMessage()); } - } From 
3ff014d07ddef197644a5c596584d9f68e64846d Mon Sep 17 00:00:00 2001 From: Glen Smith Date: Tue, 18 Apr 2017 21:27:26 -0600 Subject: [PATCH 11/19] ingest-node.asciidoc - Clarify json processor (#21876) Add examples for the json processor. --- docs/reference/ingest/ingest-node.asciidoc | 61 +++++++++++++++++++++- 1 file changed, 59 insertions(+), 2 deletions(-) diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index 970863041a9..8009f67e8e9 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1510,15 +1510,72 @@ Converts a JSON string into a structured JSON object. | `add_to_root` | no | false | Flag that forces the serialized json to be injected into the top level of the document. `target_field` must not be set when this option is chosen. |====== +Suppose you provide this configuration of the `json` processor: + [source,js] -------------------------------------------------- { - "json": { - "field": "{\"foo\": 2000}" + "json" : { + "field" : "string_source", + "target_field" : "json_target" } } -------------------------------------------------- +If the following document is processed: + +[source,js] +-------------------------------------------------- +{ + "string_source": "{\"foo\": 2000}" +} +-------------------------------------------------- + +after the `json` processor operates on it, it will look like: + +[source,js] +-------------------------------------------------- +{ + "string_source": "{\"foo\": 2000}", + "json_target": { + "foo": 2000 + } +} +-------------------------------------------------- + +If the following configuration is provided, omitting the optional `target_field` setting: +[source,js] +-------------------------------------------------- +{ + "json" : { + "field" : "source_and_target" + } +} +-------------------------------------------------- + +then after the `json` processor operates on this document: + +[source,js] 
+-------------------------------------------------- +{ + "source_and_target": "{\"foo\": 2000}" +} +-------------------------------------------------- + +it will look like: + +[source,js] +-------------------------------------------------- +{ + "source_and_target": { + "foo": 2000 + } +} +-------------------------------------------------- + +This illustrates that, unless it is explicitly named in the processor configuration, the `target_field` +is the same field provided in the required `field` configuration. + [[kv-processor]] === KV Processor This processor helps automatically parse messages (or specific event fields) which are of the foo=bar variety. From 23b224a5a96611022df80a15df296fc8339ae67b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 18 Apr 2017 23:31:31 -0400 Subject: [PATCH 12/19] Fix translog prepare commit and commit test This test was terribly, horribly, no goodly, and badly broken it's amazing it ever passed so this commit fixes it. --- .../java/org/elasticsearch/index/translog/TranslogTests.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 1e2d81705df..949f3cc258b 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -2270,10 +2270,10 @@ public class TranslogTests extends ESTestCase { final int committedGeneration = randomIntBetween(Math.max(1, Math.toIntExact(last)), Math.toIntExact(generation)); translog.commit(committedGeneration); last = committedGeneration; - for (long g = 0; i < generation; g++) { + for (long g = 0; g < committedGeneration; g++) { assertFileDeleted(translog, g); } - for (long g = generation; g < translog.currentFileGeneration(); g++) { + for (long g = committedGeneration; g < translog.currentFileGeneration(); g++) { 
assertFileIsPresent(translog, g); } } From 180d1f221934b1cecbb0207e9a719c43c1efa571 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 18 Apr 2017 23:37:54 -0400 Subject: [PATCH 13/19] Stronger check in translog prepare and commit test This commit strengthens an assertion in the translog prepare commit and commit test. --- .../java/org/elasticsearch/index/translog/TranslogTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 949f3cc258b..8255b47e3bc 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -2273,7 +2273,7 @@ public class TranslogTests extends ESTestCase { for (long g = 0; g < committedGeneration; g++) { assertFileDeleted(translog, g); } - for (long g = committedGeneration; g < translog.currentFileGeneration(); g++) { + for (long g = committedGeneration; g <= translog.currentFileGeneration(); g++) { assertFileIsPresent(translog, g); } } From 20181dd0ad6e627f8a6b3dd174641814f196270b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 18 Apr 2017 23:41:55 -0400 Subject: [PATCH 14/19] Strengthen translog commit with open view test This commit strengthens an assertion in the translog commit with open view test. 
--- .../java/org/elasticsearch/index/translog/TranslogTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 8255b47e3bc..4dd7b2e99b7 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -2301,7 +2301,7 @@ public class TranslogTests extends ESTestCase { } // the view generation could be -1 if no commit has been performed final long max = Math.max(1, Math.min(lastCommittedGeneration, viewGeneration)); - for (long g = max; g < translog.currentFileGeneration(); g++) { + for (long g = max; g <= translog.currentFileGeneration(); g++) { assertFileIsPresent(translog, g); } } From 9e0ebc5965ae37db552cb35e74d3b740d6ac9193 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 18 Apr 2017 23:43:25 -0400 Subject: [PATCH 15/19] Rename variable in translog simple commit test This commit renames a variable for clarity in the translog simple commit test. 
--- .../org/elasticsearch/index/translog/TranslogTests.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index 4dd7b2e99b7..4f572f81f33 100644 --- a/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/core/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -2246,11 +2246,11 @@ public class TranslogTests extends ESTestCase { final long generation = randomIntBetween(1, Math.toIntExact(translog.currentFileGeneration())); translog.commit(generation); - for (long i = 0; i < generation; i++) { - assertFileDeleted(translog, i); + for (long g = 0; g < generation; g++) { + assertFileDeleted(translog, g); } - for (long i = generation; i <= translog.currentFileGeneration(); i++) { - assertFileIsPresent(translog, i); + for (long g = generation; g <= translog.currentFileGeneration(); g++) { + assertFileIsPresent(translog, g); } } From 4f773e2dbb8464c66fe1cf711ba792c28d7be59f Mon Sep 17 00:00:00 2001 From: Areek Zillur Date: Wed, 19 Apr 2017 01:23:54 -0400 Subject: [PATCH 16/19] Replicate write failures (#23314) * Replicate write failures Currently, when a primary write operation fails after generating a sequence number, the failure is not communicated to the replicas. Ideally, every operation which generates a sequence number on primary should be recorded in all replicas. In this change, a sequence number is associated with write operation failure. When a failure with an assigned sequence number arrives at a replica, the failure cause and sequence number is recorded in the translog and the sequence number is marked as completed via executing `Engine.noOp` on the replica engine. * use zlong to serialize seq_no * Incorporate feedback * track write failures in translog as a noop in primary * Add tests for replicating write failures.
Test that document failure (w/ seq no generated) are recorded as no-op in the translog for primary and replica shards * Update to master * update shouldExecuteOnReplica comment * rename indexshard noop to markSeqNoAsNoOp * remove redundant conditional * Consolidate possible replica action for bulk item request depending on its primary execution * remove bulk shard result abstraction * fix failure handling logic for bwc * add more tests * minor fix * cleanup * incorporate feedback * incorporate feedback * add assert to remove handling noop primary response when 5.0 nodes are not supported --- .../action/bulk/BulkItemRequest.java | 9 +- .../action/bulk/BulkItemResponse.java | 46 ++++- .../action/bulk/TransportShardBulkAction.java | 189 +++++++++++++----- .../replication/TransportWriteAction.java | 4 +- .../elasticsearch/index/engine/Engine.java | 30 +-- .../index/engine/InternalEngine.java | 30 ++- .../elasticsearch/index/shard/IndexShard.java | 33 ++- .../shard/TranslogRecoveryPerformer.java | 4 +- .../bulk/TransportShardBulkActionTests.java | 65 ++++-- .../index/engine/InternalEngineTests.java | 12 +- .../ESIndexLevelReplicationTestCase.java | 113 +++++------ .../IndexLevelReplicationTests.java | 145 +++++++++++++- .../RecoveryDuringReplicationTests.java | 6 +- .../elasticsearch/backwards/IndexingIT.java | 42 ++-- 14 files changed, 521 insertions(+), 207 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java index 3023ecb1856..50da1476f49 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemRequest.java @@ -38,7 +38,8 @@ public class BulkItemRequest implements Streamable { } - protected BulkItemRequest(int id, DocWriteRequest request) { + // NOTE: public for testing only + public BulkItemRequest(int id, DocWriteRequest request) { this.id = id; this.request =
request; } @@ -56,13 +57,11 @@ public class BulkItemRequest implements Streamable { return request.indices()[0]; } - // NOTE: protected for testing only - protected BulkItemResponse getPrimaryResponse() { + BulkItemResponse getPrimaryResponse() { return primaryResponse; } - // NOTE: protected for testing only - protected void setPrimaryResponse(BulkItemResponse primaryResponse) { + void setPrimaryResponse(BulkItemResponse primaryResponse) { this.primaryResponse = primaryResponse; } diff --git a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index 2e2a7f15401..68cede5d251 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -37,6 +37,8 @@ import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.rest.RestStatus; import java.io.IOException; @@ -171,17 +173,34 @@ public class BulkItemResponse implements Streamable, StatusToXContentObject { private final String id; private final Exception cause; private final RestStatus status; + private final long seqNo; - Failure(String index, String type, String id, Exception cause, RestStatus status) { + /** + * For write failures before operation was assigned a sequence number. 
+ * + * use @{link {@link #Failure(String, String, String, Exception, long)}} + * to record operation sequence no with failure + */ + public Failure(String index, String type, String id, Exception cause) { + this(index, type, id, cause, ExceptionsHelper.status(cause), SequenceNumbersService.UNASSIGNED_SEQ_NO); + } + + public Failure(String index, String type, String id, Exception cause, RestStatus status) { + this(index, type, id, cause, status, SequenceNumbersService.UNASSIGNED_SEQ_NO); + } + + /** For write failures after operation was assigned a sequence number. */ + public Failure(String index, String type, String id, Exception cause, long seqNo) { + this(index, type, id, cause, ExceptionsHelper.status(cause), seqNo); + } + + public Failure(String index, String type, String id, Exception cause, RestStatus status, long seqNo) { this.index = index; this.type = type; this.id = id; this.cause = cause; this.status = status; - } - - public Failure(String index, String type, String id, Exception cause) { - this(index, type, id, cause, ExceptionsHelper.status(cause)); + this.seqNo = seqNo; } /** @@ -193,6 +212,11 @@ public class BulkItemResponse implements Streamable, StatusToXContentObject { id = in.readOptionalString(); cause = in.readException(); status = ExceptionsHelper.status(cause); + if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) { + seqNo = in.readZLong(); + } else { + seqNo = SequenceNumbersService.UNASSIGNED_SEQ_NO; + } } @Override @@ -201,6 +225,9 @@ public class BulkItemResponse implements Streamable, StatusToXContentObject { out.writeString(getType()); out.writeOptionalString(getId()); out.writeException(getCause()); + if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha1_UNRELEASED)) { + out.writeZLong(getSeqNo()); + } } @@ -246,6 +273,15 @@ public class BulkItemResponse implements Streamable, StatusToXContentObject { return cause; } + /** + * The operation sequence number generated by primary + * NOTE: {@link 
SequenceNumbersService#UNASSIGNED_SEQ_NO} + * indicates sequence number was not generated by primary + */ + public long getSeqNo() { + return seqNo; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.field(INDEX_FIELD, index); diff --git a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index 697f4c2f993..170f2d30536 100644 --- a/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/core/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -23,6 +23,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.delete.DeleteRequest; @@ -43,7 +44,6 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Nullable; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; @@ -65,13 +65,9 @@ import org.elasticsearch.indices.IndicesService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.index.translog.Translog.Location; -import org.elasticsearch.action.bulk.BulkItemResultHolder; -import org.elasticsearch.action.bulk.BulkItemResponse; import java.io.IOException; import java.util.Map; -import 
java.util.Objects; import java.util.function.LongSupplier; /** Performs shard-level bulk (index, delete or update) operations */ @@ -113,12 +109,20 @@ public class TransportShardBulkAction extends TransportWriteAction shardOperationOnPrimary( BulkShardRequest request, IndexShard primary) throws Exception { + return performOnPrimary(request, primary, updateHelper, threadPool::absoluteTimeInMillis, new ConcreteMappingUpdatePerformer()); + } + + public static WritePrimaryResult performOnPrimary( + BulkShardRequest request, + IndexShard primary, + UpdateHelper updateHelper, + LongSupplier nowInMillisSupplier, + MappingUpdatePerformer mappingUpdater) throws Exception { final IndexMetaData metaData = primary.indexSettings().getIndexMetaData(); Translog.Location location = null; - final MappingUpdatePerformer mappingUpdater = new ConcreteMappingUpdatePerformer(); for (int requestIndex = 0; requestIndex < request.items().length; requestIndex++) { location = executeBulkItemRequest(metaData, primary, request, location, requestIndex, - updateHelper, threadPool::absoluteTimeInMillis, mappingUpdater); + updateHelper, nowInMillisSupplier, mappingUpdater); } BulkItemResponse[] responses = new BulkItemResponse[request.items().length]; BulkItemRequest[] items = request.items(); @@ -129,7 +133,6 @@ public class TransportShardBulkAction extends TransportWriteAction(request, response, location, null, primary, logger); } - private static BulkItemResultHolder executeIndexRequest(final IndexRequest indexRequest, final BulkItemRequest bulkItemRequest, final IndexShard primary, @@ -208,7 +211,8 @@ public class TransportShardBulkAction extends TransportWriteAction shardOperationOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { + final Translog.Location location = performOnReplica(request, replica); + return new WriteReplicaResult<>(request, location, null, replica, logger); + } + + public static Translog.Location performOnReplica(BulkShardRequest request, IndexShard 
replica) throws Exception { Translog.Location location = null; for (int i = 0; i < request.items().length; i++) { BulkItemRequest item = request.items()[i]; - if (shouldExecuteReplicaItem(item, i)) { - DocWriteRequest docWriteRequest = item.request(); - DocWriteResponse primaryResponse = item.getPrimaryResponse().getResponse(); - final Engine.Result operationResult; - try { - switch (docWriteRequest.opType()) { - case CREATE: - case INDEX: - operationResult = executeIndexRequestOnReplica(primaryResponse, (IndexRequest) docWriteRequest, replica); - break; - case DELETE: - operationResult = executeDeleteRequestOnReplica(primaryResponse, (DeleteRequest) docWriteRequest, replica); - break; - default: - throw new IllegalStateException("Unexpected request operation type on replica: " - + docWriteRequest.opType().getLowercase()); - } - if (operationResult.hasFailure()) { - // check if any transient write operation failures should be bubbled up - Exception failure = operationResult.getFailure(); - assert failure instanceof VersionConflictEngineException - || failure instanceof MapperParsingException - : "expected any one of [version conflict, mapper parsing, engine closed, index shard closed]" + - " failures. 
got " + failure; - if (!TransportActions.isShardNotAvailableException(failure)) { - throw failure; + final Engine.Result operationResult; + DocWriteRequest docWriteRequest = item.request(); + try { + switch (replicaItemExecutionMode(item, i)) { + case NORMAL: + final DocWriteResponse primaryResponse = item.getPrimaryResponse().getResponse(); + switch (docWriteRequest.opType()) { + case CREATE: + case INDEX: + operationResult = executeIndexRequestOnReplica(primaryResponse, (IndexRequest) docWriteRequest, replica); + break; + case DELETE: + operationResult = executeDeleteRequestOnReplica(primaryResponse, (DeleteRequest) docWriteRequest, replica); + break; + default: + throw new IllegalStateException("Unexpected request operation type on replica: " + + docWriteRequest.opType().getLowercase()); } - } else { - location = locationToSync(location, operationResult.getTranslogLocation()); - } - } catch (Exception e) { - // if its not an ignore replica failure, we need to make sure to bubble up the failure - // so we will fail the shard - if (!TransportActions.isShardNotAvailableException(e)) { - throw e; - } + assert operationResult != null : "operation result must never be null when primary response has no failure"; + location = syncOperationResultOrThrow(operationResult, location); + break; + case NOOP: + break; + case FAILURE: + final BulkItemResponse.Failure failure = item.getPrimaryResponse().getFailure(); + assert failure.getSeqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO : "seq no must be assigned"; + operationResult = executeFailureNoOpOnReplica(failure, replica); + assert operationResult != null : "operation result must never be null when primary response has no failure"; + location = syncOperationResultOrThrow(operationResult, location); + break; + default: + throw new IllegalStateException("illegal replica item execution mode for: " + item.request()); + } + } catch (Exception e) { + // if its not an ignore replica failure, we need to make sure to bubble up 
the failure + // so we will fail the shard + if (!TransportActions.isShardNotAvailableException(e)) { + throw e; } } } - return new WriteReplicaResult<>(request, location, null, replica, logger); + return location; + } + + /** Syncs operation result to the translog or throws a shard not available failure */ + private static Translog.Location syncOperationResultOrThrow(final Engine.Result operationResult, + final Translog.Location currentLocation) throws Exception { + final Translog.Location location; + if (operationResult.hasFailure()) { + // check if any transient write operation failures should be bubbled up + Exception failure = operationResult.getFailure(); + assert failure instanceof MapperParsingException : "expected mapper parsing failures. got " + failure; + if (!TransportActions.isShardNotAvailableException(failure)) { + throw failure; + } else { + location = currentLocation; + } + } else { + location = locationToSync(currentLocation, operationResult.getTranslogLocation()); + } + return location; } private static Translog.Location locationToSync(Translog.Location current, @@ -429,7 +504,7 @@ public class TransportShardBulkAction extends TransportWriteAction, + public static class WritePrimaryResult, Response extends ReplicationResponse & WriteResponse> extends PrimaryResult implements RespondingWriteResult { boolean finishedAsyncActions; diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 59655abf289..45b731cd9cf 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -363,7 +363,6 @@ public abstract class Engine implements Closeable { void setTranslogLocation(Translog.Location translogLocation) { if (freeze.get() == null) { - assert failure == null : "failure has to be null to set translog location"; this.translogLocation = translogLocation; } else { throw new 
IllegalStateException("result is already frozen"); @@ -432,7 +431,7 @@ public abstract class Engine implements Closeable { } - static class NoOpResult extends Result { + public static class NoOpResult extends Result { NoOpResult(long seqNo) { super(Operation.TYPE.NO_OP, 0, seqNo); @@ -1154,24 +1153,31 @@ public abstract class Engine implements Closeable { return reason; } - public NoOp( - final Term uid, - final long seqNo, - final long primaryTerm, - final long version, - final VersionType versionType, - final Origin origin, - final long startTime, - final String reason) { - super(uid, seqNo, primaryTerm, version, versionType, origin, startTime); + public NoOp(final long seqNo, final long primaryTerm, final Origin origin, final long startTime, final String reason) { + super(null, seqNo, primaryTerm, Versions.NOT_FOUND, null, origin, startTime); this.reason = reason; } + @Override + public Term uid() { + throw new UnsupportedOperationException(); + } + @Override public String type() { throw new UnsupportedOperationException(); } + @Override + public long version() { + throw new UnsupportedOperationException(); + } + + @Override + public VersionType versionType() { + throw new UnsupportedOperationException(); + } + @Override String id() { throw new UnsupportedOperationException(); diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 0bed51e0e24..544b68add13 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -614,10 +614,16 @@ public class InternalEngine extends Engine { indexResult = new IndexResult(plan.versionForIndexing, plan.seqNoForIndexing, plan.currentNotFoundOrDeleted); } - if (indexResult.hasFailure() == false && - index.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { - Translog.Location location = - translog.add(new 
Translog.Index(index, indexResult)); + if (index.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { + final Translog.Location location; + if (indexResult.hasFailure() == false) { + location = translog.add(new Translog.Index(index, indexResult)); + } else if (indexResult.getSeqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) { + // if we have document failure, record it as a no-op in the translog with the generated seq_no + location = translog.add(new Translog.NoOp(indexResult.getSeqNo(), index.primaryTerm(), indexResult.getFailure().getMessage())); + } else { + location = null; + } indexResult.setTranslogLocation(location); } if (indexResult.getSeqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) { @@ -749,7 +755,7 @@ public class InternalEngine extends Engine { * we return a `MATCH_ANY` version to indicate no document was index. The value is * not used anyway */ - return new IndexResult(ex, Versions.MATCH_ANY, index.seqNo()); + return new IndexResult(ex, Versions.MATCH_ANY, plan.seqNoForIndexing); } else { throw ex; } @@ -900,10 +906,16 @@ public class InternalEngine extends Engine { deleteResult = new DeleteResult(plan.versionOfDeletion, plan.seqNoOfDeletion, plan.currentlyDeleted == false); } - if (!deleteResult.hasFailure() && - delete.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { - Translog.Location location = - translog.add(new Translog.Delete(delete, deleteResult)); + if (delete.origin() != Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { + final Translog.Location location; + if (deleteResult.hasFailure() == false) { + location = translog.add(new Translog.Delete(delete, deleteResult)); + } else if (deleteResult.getSeqNo() != SequenceNumbersService.UNASSIGNED_SEQ_NO) { + location = translog.add(new Translog.NoOp(deleteResult.getSeqNo(), + delete.primaryTerm(), deleteResult.getFailure().getMessage())); + } else { + location = null; + } deleteResult.setTranslogLocation(location); } if (deleteResult.getSeqNo() != 
SequenceNumbersService.UNASSIGNED_SEQ_NO) { diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 1dee58ced00..589572fff3f 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -569,12 +569,21 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return result; } + public Engine.NoOp prepareMarkingSeqNoAsNoOp(long seqNo, String reason) { + verifyReplicationTarget(); + long startTime = System.nanoTime(); + return new Engine.NoOp(seqNo, primaryTerm, Engine.Operation.Origin.REPLICA, startTime, reason); + } + + public Engine.NoOpResult markSeqNoAsNoOp(Engine.NoOp noOp) throws IOException { + ensureWriteAllowed(noOp); + Engine engine = getEngine(); + return engine.noOp(noOp); + } + public Engine.Delete prepareDeleteOnPrimary(String type, String id, long version, VersionType versionType) { verifyPrimary(); - final DocumentMapper documentMapper = docMapper(type).getDocumentMapper(); - final MappedFieldType uidFieldType = documentMapper.uidMapper().fieldType(); - final Query uidQuery = uidFieldType.termQuery(Uid.createUid(type, id), null); - final Term uid = MappedFieldType.extractTerm(uidQuery); + final Term uid = extractUid(type, id); return prepareDelete(type, id, uid, SequenceNumbersService.UNASSIGNED_SEQ_NO, primaryTerm, version, versionType, Engine.Operation.Origin.PRIMARY); } @@ -582,15 +591,12 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl public Engine.Delete prepareDeleteOnReplica(String type, String id, long seqNo, long primaryTerm, long version, VersionType versionType) { verifyReplicationTarget(); - final DocumentMapper documentMapper = docMapper(type).getDocumentMapper(); - final MappedFieldType uidFieldType = documentMapper.uidMapper().fieldType(); - final Query uidQuery = 
uidFieldType.termQuery(Uid.createUid(type, id), null); - final Term uid = MappedFieldType.extractTerm(uidQuery); + final Term uid = extractUid(type, id); return prepareDelete(type, id, uid, seqNo, primaryTerm, version, versionType, Engine.Operation.Origin.REPLICA); } - static Engine.Delete prepareDelete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, - VersionType versionType, Engine.Operation.Origin origin) { + private static Engine.Delete prepareDelete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, + VersionType versionType, Engine.Operation.Origin origin) { long startTime = System.nanoTime(); return new Engine.Delete(type, id, uid, seqNo, primaryTerm, version, versionType, origin, startTime); } @@ -601,6 +607,13 @@ public class IndexShard extends AbstractIndexShardComponent implements IndicesCl return delete(engine, delete); } + private Term extractUid(String type, String id) { + final DocumentMapper documentMapper = docMapper(type).getDocumentMapper(); + final MappedFieldType uidFieldType = documentMapper.uidMapper().fieldType(); + final Query uidQuery = uidFieldType.termQuery(Uid.createUid(type, id), null); + return MappedFieldType.extractTerm(uidQuery); + } + private Engine.DeleteResult delete(Engine engine, Engine.Delete delete) throws IOException { active.set(true); final Engine.DeleteResult result; diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index d5aadc1664e..8842cbf3c0b 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -23,7 +23,6 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import 
org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException; import org.elasticsearch.index.mapper.DocumentMapperForType; @@ -31,7 +30,6 @@ import org.elasticsearch.index.mapper.MapperException; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.Mapping; import org.elasticsearch.index.mapper.Uid; -import org.elasticsearch.index.seqno.SequenceNumbersService; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.rest.RestStatus; @@ -182,7 +180,7 @@ public class TranslogRecoveryPerformer { final String reason = noOp.reason(); logger.trace("[translog] recover [no_op] op [({}, {})] of [{}]", seqNo, primaryTerm, reason); final Engine.NoOp engineNoOp = - new Engine.NoOp(null, seqNo, primaryTerm, 0, VersionType.INTERNAL, origin, System.nanoTime(), reason); + new Engine.NoOp(seqNo, primaryTerm, origin, System.nanoTime(), reason); noOp(engine, engineNoOp); break; default: diff --git a/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java b/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java index 355b3978cbf..4016c2cbdef 100644 --- a/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java +++ b/core/src/test/java/org/elasticsearch/action/bulk/TransportShardBulkActionTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequestBuilder; +import org.elasticsearch.action.bulk.TransportShardBulkAction.ReplicaItemExecutionMode; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import 
org.elasticsearch.action.index.IndexRequest; @@ -34,14 +34,9 @@ import org.elasticsearch.action.update.UpdateHelper; import org.elasticsearch.action.update.UpdateRequest; import org.elasticsearch.action.update.UpdateResponse; import org.elasticsearch.client.Requests; -import org.elasticsearch.cluster.action.index.MappingUpdatedAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.lucene.uid.Versions; -import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.unit.TimeValue; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.index.Index; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.VersionConflictEngineException; @@ -52,15 +47,12 @@ import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.action.bulk.TransportShardBulkAction; -import org.elasticsearch.action.bulk.MappingUpdatePerformer; -import org.elasticsearch.action.bulk.BulkItemResultHolder; +import org.mockito.ArgumentCaptor; import java.io.IOException; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import static org.elasticsearch.action.bulk.TransportShardBulkAction.replicaItemExecutionMode; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.Matchers.containsString; @@ -96,26 +88,38 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { DocWriteResponse response = new IndexResponse(shardId, "type", "id", 1, 1, randomBoolean()); BulkItemRequest request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse(new 
BulkItemResponse(0, DocWriteRequest.OpType.INDEX, response)); - assertTrue(TransportShardBulkAction.shouldExecuteReplicaItem(request, 0)); + assertThat(replicaItemExecutionMode(request, 0), + equalTo(ReplicaItemExecutionMode.NORMAL)); - // Failed index requests should not be replicated (for now!) + // Failed index requests without sequence no should not be replicated writeRequest = new IndexRequest("index", "type", "id") .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar"); - response = new IndexResponse(shardId, "type", "id", 1, 1, randomBoolean()); request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse( new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, new BulkItemResponse.Failure("index", "type", "id", new IllegalArgumentException("i died")))); - assertFalse(TransportShardBulkAction.shouldExecuteReplicaItem(request, 0)); + assertThat(replicaItemExecutionMode(request, 0), + equalTo(ReplicaItemExecutionMode.NOOP)); + // Failed index requests with sequence no should be replicated + request = new BulkItemRequest(0, writeRequest); + request.setPrimaryResponse( + new BulkItemResponse(0, DocWriteRequest.OpType.INDEX, + new BulkItemResponse.Failure("index", "type", "id", + new IllegalArgumentException( + "i died after sequence no was generated"), + 1))); + assertThat(replicaItemExecutionMode(request, 0), + equalTo(ReplicaItemExecutionMode.FAILURE)); // NOOP requests should not be replicated writeRequest = new UpdateRequest("index", "type", "id"); response = new UpdateResponse(shardId, "type", "id", 1, DocWriteResponse.Result.NOOP); request = new BulkItemRequest(0, writeRequest); request.setPrimaryResponse(new BulkItemResponse(0, DocWriteRequest.OpType.UPDATE, response)); - assertFalse(TransportShardBulkAction.shouldExecuteReplicaItem(request, 0)); + assertThat(replicaItemExecutionMode(request, 0), + equalTo(ReplicaItemExecutionMode.NOOP)); } @@ -515,6 +519,35 @@ public class TransportShardBulkActionTests extends IndexShardTestCase { } + public void 
testNoOpReplicationOnPrimaryDocumentFailure() throws Exception { + final IndexShard shard = spy(newStartedShard(false)); + BulkItemRequest itemRequest = new BulkItemRequest(0, + new IndexRequest("index", "type") + .source(Requests.INDEX_CONTENT_TYPE, "foo", "bar") + ); + final String failureMessage = "simulated primary failure"; + itemRequest.setPrimaryResponse(new BulkItemResponse(0, + randomFrom( + DocWriteRequest.OpType.CREATE, + DocWriteRequest.OpType.DELETE, + DocWriteRequest.OpType.INDEX + ), + new BulkItemResponse.Failure("index", "type", "1", + new IOException(failureMessage), 1L) + )); + BulkItemRequest[] itemRequests = new BulkItemRequest[1]; + itemRequests[0] = itemRequest; + BulkShardRequest bulkShardRequest = new BulkShardRequest( + shard.shardId(), RefreshPolicy.NONE, itemRequests); + TransportShardBulkAction.performOnReplica(bulkShardRequest, shard); + ArgumentCaptor noOp = ArgumentCaptor.forClass(Engine.NoOp.class); + verify(shard, times(1)).markSeqNoAsNoOp(noOp.capture()); + final Engine.NoOp noOpValue = noOp.getValue(); + assertThat(noOpValue.seqNo(), equalTo(1L)); + assertThat(noOpValue.reason(), containsString(failureMessage)); + closeShards(shard); + } + public void testMappingUpdateParsesCorrectNumberOfTimes() throws Exception { IndexMetaData metaData = indexMetaData(); logger.info("--> metadata.getIndex(): {}", metaData.getIndex()); diff --git a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 71d754ddfb6..af53c4997fd 100644 --- a/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/core/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -2857,10 +2857,13 @@ public class InternalEngineTests extends ESTestCase { } Engine.IndexResult indexResult = engine.index(indexForDoc(doc1)); assertNotNull(indexResult.getFailure()); - + // document failures should be recorded in translog + 
assertNotNull(indexResult.getTranslogLocation()); throwingIndexWriter.get().clearFailure(); indexResult = engine.index(indexForDoc(doc1)); assertNull(indexResult.getFailure()); + // document failures should be recorded in translog + assertNotNull(indexResult.getTranslogLocation()); engine.index(indexForDoc(doc2)); // test failure while deleting @@ -3672,12 +3675,9 @@ public class InternalEngineTests extends ESTestCase { final String reason = randomAlphaOfLength(16); noOpEngine.noOp( new Engine.NoOp( - null, - maxSeqNo + 1, + maxSeqNo + 1, primaryTerm, - 0, - VersionType.INTERNAL, - randomFrom(PRIMARY, REPLICA, PEER_RECOVERY, LOCAL_TRANSLOG_RECOVERY), + randomFrom(PRIMARY, REPLICA, PEER_RECOVERY, LOCAL_TRANSLOG_RECOVERY), System.nanoTime(), reason)); assertThat(noOpEngine.seqNoService().getLocalCheckpoint(), equalTo((long) (maxSeqNo + 1))); diff --git a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index c35f72d2085..2243a5769b9 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -22,21 +22,21 @@ package org.elasticsearch.index.replication; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.bulk.BulkItemRequest; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.bulk.BulkShardResponse; +import org.elasticsearch.action.bulk.TransportShardBulkAction; import 
org.elasticsearch.action.bulk.TransportShardBulkActionTests; import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.replication.ReplicationOperation; import org.elasticsearch.action.support.replication.ReplicationRequest; import org.elasticsearch.action.support.replication.ReplicationResponse; import org.elasticsearch.action.support.replication.TransportReplicationAction.ReplicaResponse; +import org.elasticsearch.action.support.replication.TransportWriteAction; import org.elasticsearch.action.support.replication.TransportWriteActionTestHelper; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -50,7 +50,6 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; -import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.seqno.GlobalCheckpointSyncAction; @@ -58,6 +57,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.IndexShardTestCase; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardPath; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.indices.recovery.RecoveryState; import org.elasticsearch.indices.recovery.RecoveryTarget; @@ -77,8 +77,6 @@ import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.StreamSupport; -import static org.elasticsearch.action.bulk.TransportShardBulkAction.executeIndexRequestOnPrimary; -import static org.elasticsearch.action.bulk.TransportShardBulkAction.executeIndexRequestOnReplica; import static org.hamcrest.Matchers.empty; import static 
org.hamcrest.Matchers.equalTo; @@ -147,9 +145,13 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase public int indexDocs(final int numOfDoc) throws Exception { for (int doc = 0; doc < numOfDoc; doc++) { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", Integer.toString(docId.incrementAndGet())) - .source("{}", XContentType.JSON); - final IndexResponse response = index(indexRequest); - assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); + .source("{}", XContentType.JSON); + final BulkItemResponse response = index(indexRequest); + if (response.isFailed()) { + throw response.getFailure().getCause(); + } else { + assertEquals(DocWriteResponse.Result.CREATED, response.getResponse().getResult()); + } } primary.updateGlobalCheckpointOnPrimary(); return numOfDoc; @@ -158,43 +160,29 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase public int appendDocs(final int numOfDoc) throws Exception { for (int doc = 0; doc < numOfDoc; doc++) { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); - final IndexResponse response = index(indexRequest); - assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); + final BulkItemResponse response = index(indexRequest); + if (response.isFailed()) { + throw response.getFailure().getCause(); + } else if (response.isFailed() == false) { + assertEquals(DocWriteResponse.Result.CREATED, response.getResponse().getResult()); + } } primary.updateGlobalCheckpointOnPrimary(); return numOfDoc; } - public IndexResponse index(IndexRequest indexRequest) throws Exception { - PlainActionFuture listener = new PlainActionFuture<>(); + public BulkItemResponse index(IndexRequest indexRequest) throws Exception { + PlainActionFuture listener = new PlainActionFuture<>(); final ActionListener wrapBulkListener = ActionListener.wrap( - bulkShardResponse -> 
listener.onResponse(bulkShardResponse.getResponses()[0].getResponse()), + bulkShardResponse -> listener.onResponse(bulkShardResponse.getResponses()[0]), listener::onFailure); BulkItemRequest[] items = new BulkItemRequest[1]; - items[0] = new TestBulkItemRequest(0, indexRequest); + items[0] = new BulkItemRequest(0, indexRequest); BulkShardRequest request = new BulkShardRequest(shardId, indexRequest.getRefreshPolicy(), items); new IndexingAction(request, wrapBulkListener, this).execute(); return listener.get(); } - /** BulkItemRequest exposing get/set primary response */ - public class TestBulkItemRequest extends BulkItemRequest { - - TestBulkItemRequest(int id, DocWriteRequest request) { - super(id, request); - } - - @Override - protected void setPrimaryResponse(BulkItemResponse primaryResponse) { - super.setPrimaryResponse(primaryResponse); - } - - @Override - protected BulkItemResponse getPrimaryResponse() { - return super.getPrimaryResponse(); - } - } - public synchronized void startAll() throws IOException { startReplicas(replicas.size()); } @@ -442,7 +430,7 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase protected abstract PrimaryResult performOnPrimary(IndexShard primary, Request request) throws Exception; - protected abstract void performOnReplica(ReplicaRequest request, IndexShard replica) throws IOException; + protected abstract void performOnReplica(ReplicaRequest request, IndexShard replica) throws Exception; class PrimaryRef implements ReplicationOperation.Primary { @@ -539,46 +527,53 @@ public abstract class ESIndexLevelReplicationTestCase extends IndexShardTestCase @Override protected PrimaryResult performOnPrimary(IndexShard primary, BulkShardRequest request) throws Exception { - final IndexRequest indexRequest = (IndexRequest) request.items()[0].request(); - indexRequest.process(null, request.index()); - final IndexResponse indexResponse = indexOnPrimary(indexRequest, primary); - BulkItemResponse[] itemResponses = 
new BulkItemResponse[1]; - itemResponses[0] = new BulkItemResponse(0, indexRequest.opType(), indexResponse); - ((ReplicationGroup.TestBulkItemRequest) request.items()[0]).setPrimaryResponse(itemResponses[0]); - return new PrimaryResult(request, new BulkShardResponse(primary.shardId(), itemResponses)); + final TransportWriteAction.WritePrimaryResult result = executeShardBulkOnPrimary(primary, request); + return new PrimaryResult(result.replicaRequest(), result.finalResponseIfSuccessful); } @Override - protected void performOnReplica(BulkShardRequest request, IndexShard replica) throws IOException { - final ReplicationGroup.TestBulkItemRequest bulkItemRequest = ((ReplicationGroup.TestBulkItemRequest) request.items()[0]); - final DocWriteResponse primaryResponse = bulkItemRequest.getPrimaryResponse().getResponse(); - indexOnReplica(primaryResponse, ((IndexRequest) bulkItemRequest.request()), replica); + protected void performOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { + executeShardBulkOnReplica(replica, request); } } + private TransportWriteAction.WritePrimaryResult executeShardBulkOnPrimary(IndexShard primary, BulkShardRequest request) throws Exception { + for (BulkItemRequest itemRequest : request.items()) { + if (itemRequest.request() instanceof IndexRequest) { + ((IndexRequest) itemRequest.request()).process(null, index.getName()); + } + } + final TransportWriteAction.WritePrimaryResult result = + TransportShardBulkAction.performOnPrimary(request, primary, null, + System::currentTimeMillis, new TransportShardBulkActionTests.NoopMappingUpdatePerformer()); + request.primaryTerm(primary.getPrimaryTerm()); + TransportWriteActionTestHelper.performPostWriteActions(primary, request, result.location, logger); + return result; + } + + private void executeShardBulkOnReplica(IndexShard replica, BulkShardRequest request) throws Exception { + final Translog.Location location = TransportShardBulkAction.performOnReplica(request, replica); + 
TransportWriteActionTestHelper.performPostWriteActions(replica, request, location, logger); + } + /** * indexes the given requests on the supplied primary, modifying it for replicas */ - protected IndexResponse indexOnPrimary(IndexRequest request, IndexShard primary) throws Exception { - final Engine.IndexResult indexResult = executeIndexRequestOnPrimary(request, primary, - new TransportShardBulkActionTests.NoopMappingUpdatePerformer()); - request.primaryTerm(primary.getPrimaryTerm()); - TransportWriteActionTestHelper.performPostWriteActions(primary, request, indexResult.getTranslogLocation(), logger); - return new IndexResponse( - primary.shardId(), - request.type(), - request.id(), - indexResult.getSeqNo(), - indexResult.getVersion(), - indexResult.isCreated()); + BulkShardRequest indexOnPrimary(IndexRequest request, IndexShard primary) throws Exception { + final BulkItemRequest bulkItemRequest = new BulkItemRequest(0, request); + BulkItemRequest[] bulkItemRequests = new BulkItemRequest[1]; + bulkItemRequests[0] = bulkItemRequest; + final BulkShardRequest bulkShardRequest = new BulkShardRequest(shardId, request.getRefreshPolicy(), bulkItemRequests); + final TransportWriteAction.WritePrimaryResult result = + executeShardBulkOnPrimary(primary, bulkShardRequest); + return result.replicaRequest(); } /** * indexes the given requests on the supplied replica shard */ - protected void indexOnReplica(DocWriteResponse response, IndexRequest request, IndexShard replica) throws IOException { - final Engine.IndexResult result = executeIndexRequestOnReplica(response, request, replica); - TransportWriteActionTestHelper.performPostWriteActions(replica, request, result.getTranslogLocation(), logger); + void indexOnReplica(BulkShardRequest request, IndexShard replica) throws Exception { + executeShardBulkOnReplica(replica, request); } class GlobalCheckpointSync extends ReplicationAction future = shards.asyncRecoverReplica(replica, (indexShard, node) - -> new 
RecoveryTarget(indexShard, node, recoveryListener, version -> {}) { + -> new RecoveryTarget(indexShard, node, recoveryListener, version -> { + }) { @Override public void cleanFiles(int totalTranslogOps, Store.MetadataSnapshot sourceMetaData) throws IOException { super.cleanFiles(totalTranslogOps, sourceMetaData); @@ -113,8 +122,8 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase shards.startAll(); final IndexRequest indexRequest = new IndexRequest(index.getName(), "type").source("{}", XContentType.JSON); indexRequest.onRetry(); // force an update of the timestamp - final IndexResponse response = shards.index(indexRequest); - assertEquals(DocWriteResponse.Result.CREATED, response.getResult()); + final BulkItemResponse response = shards.index(indexRequest); + assertEquals(DocWriteResponse.Result.CREATED, response.getResponse().getResult()); if (randomBoolean()) { // lets check if that also happens if no translog record is replicated shards.flush(); } @@ -147,7 +156,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase final SeqNoStats shardStats = shard.seqNoStats(); final ShardRouting shardRouting = shard.routingEntry(); logger.debug("seq_no stats for {}: {}", shardRouting, XContentHelper.toString(shardStats, - new ToXContent.MapParams(Collections.singletonMap("pretty", "false")))); + new ToXContent.MapParams(Collections.singletonMap("pretty", "false")))); assertThat(shardRouting + " local checkpoint mismatch", shardStats.getLocalCheckpoint(), equalTo(numDocs - 1L)); assertThat(shardRouting + " global checkpoint mismatch", shardStats.getGlobalCheckpoint(), equalTo(numDocs - 1L)); @@ -158,7 +167,7 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase public void testConflictingOpsOnReplica() throws Exception { Map mappings = - Collections.singletonMap("type", "{ \"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"); + Collections.singletonMap("type", "{ 
\"type\": { \"properties\": { \"f\": { \"type\": \"keyword\"} }}}"); try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetaData(2, mappings))) { shards.startAll(); IndexShard replica1 = shards.getReplicas().get(0); @@ -180,4 +189,128 @@ public class IndexLevelReplicationTests extends ESIndexLevelReplicationTestCase } } } + + /** + * test document failures (failures after seq_no generation) are added as noop operation to the translog + * for primary and replica shards + */ + public void testDocumentFailureReplication() throws Exception { + final String failureMessage = "simulated document failure"; + final ThrowingDocumentFailureEngineFactory throwingDocumentFailureEngineFactory = + new ThrowingDocumentFailureEngineFactory(failureMessage); + try (ReplicationGroup shards = new ReplicationGroup(buildIndexMetaData(0)) { + @Override + protected EngineFactory getEngineFactory(ShardRouting routing) { + return throwingDocumentFailureEngineFactory; + }}) { + + // test only primary + shards.startPrimary(); + BulkItemResponse response = shards.index( + new IndexRequest(index.getName(), "testDocumentFailureReplication", "1") + .source("{}", XContentType.JSON) + ); + assertTrue(response.isFailed()); + assertNoOpTranslogOperationForDocumentFailure(shards, 1, failureMessage); + shards.assertAllEqual(0); + + // add some replicas + int nReplica = randomIntBetween(1, 3); + for (int i = 0; i < nReplica; i++) { + shards.addReplica(); + } + shards.startReplicas(nReplica); + response = shards.index( + new IndexRequest(index.getName(), "testDocumentFailureReplication", "1") + .source("{}", XContentType.JSON) + ); + assertTrue(response.isFailed()); + assertNoOpTranslogOperationForDocumentFailure(shards, 2, failureMessage); + shards.assertAllEqual(0); + } + } + + /** + * test request failures (failures before seq_no generation) are not added as a noop to translog + */ + public void testRequestFailureReplication() throws Exception { + try (ReplicationGroup shards = 
createGroup(0)) { + shards.startAll(); + BulkItemResponse response = shards.index( + new IndexRequest(index.getName(), "testRequestFailureException", "1") + .source("{}", XContentType.JSON) + .version(2) + ); + assertTrue(response.isFailed()); + assertThat(response.getFailure().getCause(), instanceOf(VersionConflictEngineException.class)); + shards.assertAllEqual(0); + for (IndexShard indexShard : shards) { + try(Translog.View view = indexShard.acquireTranslogView()) { + assertThat(view.totalOperations(), equalTo(0)); + } + } + + // add some replicas + int nReplica = randomIntBetween(1, 3); + for (int i = 0; i < nReplica; i++) { + shards.addReplica(); + } + shards.startReplicas(nReplica); + response = shards.index( + new IndexRequest(index.getName(), "testRequestFailureException", "1") + .source("{}", XContentType.JSON) + .version(2) + ); + assertTrue(response.isFailed()); + assertThat(response.getFailure().getCause(), instanceOf(VersionConflictEngineException.class)); + shards.assertAllEqual(0); + for (IndexShard indexShard : shards) { + try(Translog.View view = indexShard.acquireTranslogView()) { + assertThat(view.totalOperations(), equalTo(0)); + } + } + } + } + + /** Throws documentFailure on every indexing operation */ + static class ThrowingDocumentFailureEngineFactory implements EngineFactory { + final String documentFailureMessage; + + ThrowingDocumentFailureEngineFactory(String documentFailureMessage) { + this.documentFailureMessage = documentFailureMessage; + } + + @Override + public Engine newReadWriteEngine(EngineConfig config) { + return InternalEngineTests.createInternalEngine((directory, writerConfig) -> + new IndexWriter(directory, writerConfig) { + @Override + public long addDocument(Iterable doc) throws IOException { + assert documentFailureMessage != null; + throw new IOException(documentFailureMessage); + } + }, null, config); + } + } + + private static void assertNoOpTranslogOperationForDocumentFailure( + Iterable replicationGroup, + int 
expectedOperation, + String failureMessage) throws IOException { + for (IndexShard indexShard : replicationGroup) { + try(Translog.View view = indexShard.acquireTranslogView()) { + assertThat(view.totalOperations(), equalTo(expectedOperation)); + final Translog.Snapshot snapshot = view.snapshot(); + long expectedSeqNo = 0L; + Translog.Operation op = snapshot.next(); + do { + assertThat(op.opType(), equalTo(Translog.Operation.Type.NO_OP)); + assertThat(op.seqNo(), equalTo(expectedSeqNo)); + assertThat(((Translog.NoOp) op).reason(), containsString(failureMessage)); + op = snapshot.next(); + expectedSeqNo++; + } while (op != null); + } + } + } } diff --git a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index 12f749e6819..139c7f500d8 100644 --- a/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/core/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -24,9 +24,9 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.IOUtils; import org.elasticsearch.action.admin.indices.flush.FlushRequest; +import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.cluster.metadata.IndexMetaData; -import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.index.engine.Engine; @@ -168,8 +168,8 @@ public class RecoveryDuringReplicationTests extends ESIndexLevelReplicationTestC for (int i = 0; i < rollbackDocs; i++) { final IndexRequest indexRequest = new IndexRequest(index.getName(), "type", "rollback_" + i) .source("{}", XContentType.JSON); - final IndexResponse primaryResponse = 
indexOnPrimary(indexRequest, oldPrimary); - indexOnReplica(primaryResponse, indexRequest, replica); + final BulkShardRequest bulkShardRequest = indexOnPrimary(indexRequest, oldPrimary); + indexOnReplica(bulkShardRequest, replica); } if (randomBoolean()) { oldPrimary.flush(new FlushRequest(index.getName())); diff --git a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/IndexingIT.java b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/IndexingIT.java index f0be7753067..6ef40a77782 100644 --- a/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/IndexingIT.java +++ b/qa/backwards-5.0/src/test/java/org/elasticsearch/backwards/IndexingIT.java @@ -41,6 +41,7 @@ import java.util.List; import java.util.Map; import java.util.stream.Collectors; +import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.anyOf; @@ -76,7 +77,7 @@ public class IndexingIT extends ESRestTestCase { for (int i = 0; i < numDocs; i++) { final int id = idStart + i; assertOK(client().performRequest("PUT", index + "/test/" + id, emptyMap(), - new StringEntity("{\"test\": \"test_" + id + "\"}", ContentType.APPLICATION_JSON))); + new StringEntity("{\"test\": \"test_" + randomAsciiOfLength(2) + "\"}", ContentType.APPLICATION_JSON))); } return numDocs; } @@ -116,7 +117,7 @@ public class IndexingIT extends ESRestTestCase { .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 2) .put("index.routing.allocation.include._name", bwcNames); - final String index = "test"; + final String index = "indexversionprop"; final int minUpdates = 5; final int maxUpdates = 10; createIndex(index, settings.build()); @@ -130,7 +131,9 @@ public class IndexingIT extends ESRestTestCase { updateIndexSetting(index, 
Settings.builder().putNull("index.routing.allocation.include._name")); ensureGreen(); assertOK(client().performRequest("POST", index + "/_refresh")); - List shards = buildShards(nodes, newNodeClient); + List shards = buildShards(index, nodes, newNodeClient); + Shard primary = buildShards(index, nodes, newNodeClient).stream().filter(Shard::isPrimary).findFirst().get(); + logger.info("primary resolved to: " + primary.getNode().getNodeName()); for (Shard shard : shards) { assertVersion(index, 1, "_only_nodes:" + shard.getNode().getNodeName(), finalVersionForDoc1); assertCount(index, "_only_nodes:" + shard.getNode().getNodeName(), 1); @@ -140,13 +143,15 @@ public class IndexingIT extends ESRestTestCase { logger.info("indexing docs with [{}] concurrent updates after allowing shards on all nodes", nUpdates); final int finalVersionForDoc2 = indexDocWithConcurrentUpdates(index, 2, nUpdates); assertOK(client().performRequest("POST", index + "/_refresh")); - shards = buildShards(nodes, newNodeClient); + shards = buildShards(index, nodes, newNodeClient); + primary = shards.stream().filter(Shard::isPrimary).findFirst().get(); + logger.info("primary resolved to: " + primary.getNode().getNodeName()); for (Shard shard : shards) { assertVersion(index, 2, "_only_nodes:" + shard.getNode().getNodeName(), finalVersionForDoc2); assertCount(index, "_only_nodes:" + shard.getNode().getNodeName(), 2); } - Shard primary = buildShards(nodes, newNodeClient).stream().filter(Shard::isPrimary).findFirst().get(); + primary = shards.stream().filter(Shard::isPrimary).findFirst().get(); logger.info("moving primary to new node by excluding {}", primary.getNode().getNodeName()); updateIndexSetting(index, Settings.builder().put("index.routing.allocation.exclude._name", primary.getNode().getNodeName())); ensureGreen(); @@ -154,7 +159,7 @@ public class IndexingIT extends ESRestTestCase { logger.info("indexing docs with [{}] concurrent updates after moving primary", nUpdates); final int 
finalVersionForDoc3 = indexDocWithConcurrentUpdates(index, 3, nUpdates); assertOK(client().performRequest("POST", index + "/_refresh")); - shards = buildShards(nodes, newNodeClient); + shards = buildShards(index, nodes, newNodeClient); for (Shard shard : shards) { assertVersion(index, 3, "_only_nodes:" + shard.getNode().getNodeName(), finalVersionForDoc3); assertCount(index, "_only_nodes:" + shard.getNode().getNodeName(), 3); @@ -167,7 +172,7 @@ public class IndexingIT extends ESRestTestCase { logger.info("indexing doc with [{}] concurrent updates after setting number of replicas to 0", nUpdates); final int finalVersionForDoc4 = indexDocWithConcurrentUpdates(index, 4, nUpdates); assertOK(client().performRequest("POST", index + "/_refresh")); - shards = buildShards(nodes, newNodeClient); + shards = buildShards(index, nodes, newNodeClient); for (Shard shard : shards) { assertVersion(index, 4, "_only_nodes:" + shard.getNode().getNodeName(), finalVersionForDoc4); assertCount(index, "_only_nodes:" + shard.getNode().getNodeName(), 4); @@ -180,7 +185,7 @@ public class IndexingIT extends ESRestTestCase { logger.info("indexing doc with [{}] concurrent updates after setting number of replicas to 1", nUpdates); final int finalVersionForDoc5 = indexDocWithConcurrentUpdates(index, 5, nUpdates); assertOK(client().performRequest("POST", index + "/_refresh")); - shards = buildShards(nodes, newNodeClient); + shards = buildShards(index, nodes, newNodeClient); for (Shard shard : shards) { assertVersion(index, 5, "_only_nodes:" + shard.getNode().getNodeName(), finalVersionForDoc5); assertCount(index, "_only_nodes:" + shard.getNode().getNodeName(), 5); @@ -216,7 +221,7 @@ public class IndexingIT extends ESRestTestCase { final int numberOfInitialDocs = 1 + randomInt(5); logger.info("indexing [{}] docs initially", numberOfInitialDocs); numDocs += indexDocs(index, 0, numberOfInitialDocs); - assertSeqNoOnShards(nodes, checkGlobalCheckpoints, 0, newNodeClient); + assertSeqNoOnShards(index, 
nodes, checkGlobalCheckpoints, 0, newNodeClient); logger.info("allowing shards on all nodes"); updateIndexSetting(index, Settings.builder().putNull("index.routing.allocation.include._name")); ensureGreen(); @@ -227,8 +232,8 @@ public class IndexingIT extends ESRestTestCase { final int numberOfDocsAfterAllowingShardsOnAllNodes = 1 + randomInt(5); logger.info("indexing [{}] docs after allowing shards on all nodes", numberOfDocsAfterAllowingShardsOnAllNodes); numDocs += indexDocs(index, numDocs, numberOfDocsAfterAllowingShardsOnAllNodes); - assertSeqNoOnShards(nodes, checkGlobalCheckpoints, 0, newNodeClient); - Shard primary = buildShards(nodes, newNodeClient).stream().filter(Shard::isPrimary).findFirst().get(); + assertSeqNoOnShards(index, nodes, checkGlobalCheckpoints, 0, newNodeClient); + Shard primary = buildShards(index, nodes, newNodeClient).stream().filter(Shard::isPrimary).findFirst().get(); logger.info("moving primary to new node by excluding {}", primary.getNode().getNodeName()); updateIndexSetting(index, Settings.builder().put("index.routing.allocation.exclude._name", primary.getNode().getNodeName())); ensureGreen(); @@ -237,7 +242,7 @@ public class IndexingIT extends ESRestTestCase { logger.info("indexing [{}] docs after moving primary", numberOfDocsAfterMovingPrimary); numDocsOnNewPrimary += indexDocs(index, numDocs, numberOfDocsAfterMovingPrimary); numDocs += numberOfDocsAfterMovingPrimary; - assertSeqNoOnShards(nodes, checkGlobalCheckpoints, numDocsOnNewPrimary, newNodeClient); + assertSeqNoOnShards(index, nodes, checkGlobalCheckpoints, numDocsOnNewPrimary, newNodeClient); /* * Dropping the number of replicas to zero, and then increasing it to one triggers a recovery thus exercising any BWC-logic in * the recovery code. 
@@ -255,7 +260,7 @@ public class IndexingIT extends ESRestTestCase { // the number of documents on the primary and on the recovered replica should match the number of indexed documents assertCount(index, "_primary", numDocs); assertCount(index, "_replica", numDocs); - assertSeqNoOnShards(nodes, checkGlobalCheckpoints, numDocsOnNewPrimary, newNodeClient); + assertSeqNoOnShards(index, nodes, checkGlobalCheckpoints, numDocsOnNewPrimary, newNodeClient); } } @@ -274,10 +279,11 @@ public class IndexingIT extends ESRestTestCase { assertThat("version mismatch for doc [" + docId + "] preference [" + preference + "]", actualVersion, equalTo(expectedVersion)); } - private void assertSeqNoOnShards(Nodes nodes, boolean checkGlobalCheckpoints, int numDocs, RestClient client) throws Exception { + private void assertSeqNoOnShards(String index, Nodes nodes, boolean checkGlobalCheckpoints, int numDocs, RestClient client) + throws Exception { assertBusy(() -> { try { - List shards = buildShards(nodes, client); + List shards = buildShards(index, nodes, client); Shard primaryShard = shards.stream().filter(Shard::isPrimary).findFirst().get(); assertNotNull("failed to find primary shard", primaryShard); final long expectedGlobalCkp; @@ -311,9 +317,9 @@ public class IndexingIT extends ESRestTestCase { }); } - private List buildShards(Nodes nodes, RestClient client) throws IOException { - Response response = client.performRequest("GET", "test/_stats", singletonMap("level", "shards")); - List shardStats = ObjectPath.createFromResponse(response).evaluate("indices.test.shards.0"); + private List buildShards(String index, Nodes nodes, RestClient client) throws IOException { + Response response = client.performRequest("GET", index + "/_stats", singletonMap("level", "shards")); + List shardStats = ObjectPath.createFromResponse(response).evaluate("indices." 
+ index + ".shards.0"); ArrayList shards = new ArrayList<>(); for (Object shard : shardStats) { final String nodeId = ObjectPath.evaluate(shard, "routing.node"); From 741c0313847cf2bb362bb81e0abb5be1867bedcc Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 19 Apr 2017 09:37:01 +0200 Subject: [PATCH 17/19] [Test] Add unit tests for InternalHDRPercentilesTests (#24157) Related to #22278 --- .../InternalPercentilesTestCase.java | 61 +++++++++++++++++++ .../hdr/InternalHDRPercentilesTests.java | 60 ++++++++++++++++++ .../InternalTDigestPercentilesTests.java | 30 +++------ 3 files changed, 129 insertions(+), 22 deletions(-) create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesTests.java diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java new file mode 100644 index 00000000000..0cfa07538e4 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/InternalPercentilesTestCase.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.metrics.percentiles; + +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregationTestCase; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.junit.Before; + +import java.util.List; +import java.util.Map; + +public abstract class InternalPercentilesTestCase extends InternalAggregationTestCase { + + private double[] percents; + + @Before + public void init() { + percents = randomPercents(); + } + + @Override + protected T createTestInstance(String name, List pipelineAggregators, Map metaData) { + int numValues = randomInt(100); + double[] values = new double[numValues]; + for (int i = 0; i < numValues; ++i) { + values[i] = randomDouble(); + } + return createTestInstance(name, pipelineAggregators, metaData, randomBoolean(), DocValueFormat.RAW, percents, values); + } + + protected abstract T createTestInstance(String name, List pipelineAggregators, Map metaData, + boolean keyed, DocValueFormat format, double[] percents, double[] values); + + private static double[] randomPercents() { + List randomCdfValues = randomSubsetOf(randomIntBetween(1, 7), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d); + double[] percents = new double[randomCdfValues.size()]; + for (int i = 0; i < randomCdfValues.size(); i++) { + percents[i] = randomCdfValues.get(i); + } + return percents; + } +} diff --git 
a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesTests.java new file mode 100644 index 00000000000..bff026d5cf4 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/hdr/InternalHDRPercentilesTests.java @@ -0,0 +1,60 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.metrics.percentiles.hdr; + +import org.HdrHistogram.DoubleHistogram; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +public class InternalHDRPercentilesTests extends InternalPercentilesTestCase { + + @Override + protected InternalHDRPercentiles createTestInstance(String name, + List pipelineAggregators, + Map metaData, + boolean keyed, DocValueFormat format, double[] percents, double[] values) { + + final DoubleHistogram state = new DoubleHistogram(3); + Arrays.stream(values).forEach(state::recordValue); + + return new InternalHDRPercentiles(name, percents, state, keyed, format, pipelineAggregators, metaData); + } + + @Override + protected void assertReduced(InternalHDRPercentiles reduced, List inputs) { + // it is hard to check the values due to the inaccuracy of the algorithm + long totalCount = 0; + for (InternalHDRPercentiles ranks : inputs) { + totalCount += ranks.state.getTotalCount(); + } + assertEquals(totalCount, reduced.state.getTotalCount()); + } + + @Override + protected Writeable.Reader instanceReader() { + return InternalHDRPercentiles::new; + } +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesTests.java index 75efa516409..f2db4a48530 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/metrics/percentiles/tdigest/InternalTDigestPercentilesTests.java @@ -21,29 
+21,24 @@ package org.elasticsearch.search.aggregations.metrics.percentiles.tdigest; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.aggregations.InternalAggregationTestCase; +import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentilesTestCase; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import java.util.Arrays; import java.util.List; import java.util.Map; -public class InternalTDigestPercentilesTests extends InternalAggregationTestCase { - - private final double[] percents = randomPercents(); +public class InternalTDigestPercentilesTests extends InternalPercentilesTestCase { @Override protected InternalTDigestPercentiles createTestInstance(String name, List pipelineAggregators, - Map metaData) { - boolean keyed = randomBoolean(); - DocValueFormat format = DocValueFormat.RAW; - TDigestState state = new TDigestState(100); + Map metaData, + boolean keyed, DocValueFormat format, double[] percents, double[] values) { + final TDigestState state = new TDigestState(100); + Arrays.stream(values).forEach(state::add); - int numValues = randomInt(10); - for (int i = 0; i < numValues; ++i) { - state.add(randomDouble() * 100); - } - assertEquals(state.centroidCount(), numValues); + assertEquals(state.centroidCount(), values.length); return new InternalTDigestPercentiles(name, percents, state, keyed, format, pipelineAggregators, metaData); } @@ -69,13 +64,4 @@ public class InternalTDigestPercentilesTests extends InternalAggregationTestCase protected Writeable.Reader instanceReader() { return InternalTDigestPercentiles::new; } - - private static double[] randomPercents() { - List randomCdfValues = randomSubsetOf(randomIntBetween(1, 7), 0.01d, 0.05d, 0.25d, 0.50d, 0.75d, 0.95d, 0.99d); - double[] percents = new double[randomCdfValues.size()]; - for (int i = 0; i < randomCdfValues.size(); i++) { - percents[i] = randomCdfValues.get(i); - } - 
return percents; - } } From e81bbc288aa7eaf9abb210cf57a866d66c1116ff Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 19 Apr 2017 09:39:42 +0200 Subject: [PATCH 18/19] Remove Ubuntu 12.04 (#24161) Ubuntu 12.04 will be EOL on April 28, 2017. --- TESTING.asciidoc | 3 +-- Vagrantfile | 4 ---- .../org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy | 1 - docs/plugins/discovery-azure-classic.asciidoc | 2 +- 4 files changed, 2 insertions(+), 8 deletions(-) diff --git a/TESTING.asciidoc b/TESTING.asciidoc index 43b53fd360f..216100c07da 100644 --- a/TESTING.asciidoc +++ b/TESTING.asciidoc @@ -345,7 +345,6 @@ VM running trusty by running These are the linux flavors the Vagrantfile currently supports: -* ubuntu-1204 aka precise * ubuntu-1404 aka trusty * ubuntu-1604 aka xenial * debian-8 aka jessie, the current debian stable distribution @@ -431,7 +430,7 @@ gradle vagrantFedora24#up ------------------------------------------------- Or any of vagrantCentos6#up, vagrantDebian8#up, vagrantFedora24#up, vagrantOel6#up, -vagrantOel7#up, vagrantOpensuse13#up, vagrantSles12#up, vagrantUbuntu1204#up, +vagrantOel7#up, vagrantOpensuse13#up, vagrantSles12#up, vagrantUbuntu1404#up, vagrantUbuntu1604#up. Once up, you can then connect to the VM using SSH from the elasticsearch directory: diff --git a/Vagrantfile b/Vagrantfile index 04439442404..f008b339c3f 100644 --- a/Vagrantfile +++ b/Vagrantfile @@ -22,10 +22,6 @@ # under the License. 
Vagrant.configure(2) do |config| - config.vm.define "ubuntu-1204" do |config| - config.vm.box = "elastic/ubuntu-12.04-x86_64" - ubuntu_common config - end config.vm.define "ubuntu-1404" do |config| config.vm.box = "elastic/ubuntu-14.04-x86_64" ubuntu_common config diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index 336ee207abf..2fb047e9305 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -22,7 +22,6 @@ class VagrantTestPlugin implements Plugin { 'oel-7', 'opensuse-13', 'sles-12', - 'ubuntu-1204', 'ubuntu-1404', 'ubuntu-1604' ] diff --git a/docs/plugins/discovery-azure-classic.asciidoc b/docs/plugins/discovery-azure-classic.asciidoc index f69df7f5171..0362f2a6fe3 100644 --- a/docs/plugins/discovery-azure-classic.asciidoc +++ b/docs/plugins/discovery-azure-classic.asciidoc @@ -169,7 +169,7 @@ Before starting, you need to have: * A http://www.windowsazure.com/[Windows Azure account] * OpenSSL that isn't from MacPorts, specifically `OpenSSL 1.0.1f 6 Jan 2014` doesn't seem to create a valid keypair for ssh. FWIW, - `OpenSSL 1.0.1c 10 May 2012` on Ubuntu 12.04 LTS is known to work. + `OpenSSL 1.0.1c 10 May 2012` on Ubuntu 14.04 LTS is known to work. 
* SSH keys and certificate + -- From 8758c541b35bda1a6494ba703b450517027d5e9f Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 19 Apr 2017 09:49:17 +0200 Subject: [PATCH 19/19] ElectMasterService.hasEnoughMasterNodes should return false if no masters were found This is a regression introduced in #20063 --- .../org/elasticsearch/discovery/zen/ElectMasterService.java | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java b/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java index 92b20c5199b..024c50fb6e0 100644 --- a/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java +++ b/core/src/main/java/org/elasticsearch/discovery/zen/ElectMasterService.java @@ -150,7 +150,8 @@ public class ElectMasterService extends AbstractComponent { } public boolean hasEnoughMasterNodes(Iterable nodes) { - return minimumMasterNodes < 1 || countMasterNodes(nodes) >= minimumMasterNodes; + final int count = countMasterNodes(nodes); + return count > 0 && (minimumMasterNodes < 0 || count >= minimumMasterNodes); } public boolean hasTooManyMasterNodes(Iterable nodes) {