From 877589e1539a2369127e0360dcea1589227f7344 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Thu, 3 Sep 2015 20:59:57 +0200 Subject: [PATCH 01/17] In the license checker, include the "ignore prefix" parameter in the output Closes #13322 --- .../src/main/resources/license-check/check_license_and_sha.pl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/dev-tools/src/main/resources/license-check/check_license_and_sha.pl b/dev-tools/src/main/resources/license-check/check_license_and_sha.pl index 4d9d5ba06b8..c6b0f04b6db 100755 --- a/dev-tools/src/main/resources/license-check/check_license_and_sha.pl +++ b/dev-tools/src/main/resources/license-check/check_license_and_sha.pl @@ -19,7 +19,7 @@ die usage() unless $mode =~ /^--(check|update)$/; my $License_Dir = shift(@ARGV) || die usage(); my $Source = shift(@ARGV) || die usage(); -my $Ignore = shift(@ARGV); +my $Ignore = shift(@ARGV) || ''; my $ignore = $Ignore ? qr/${Ignore}[^\/]*$/ @@ -129,7 +129,7 @@ sub check_shas_and_licenses { You can update the SHA files by running: -$0 --update $License_Dir $Source +$0 --update $License_Dir $Source $Ignore SHAS } From f216d92d19cf2f12fbc460c7aef66f38998ef9fd Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 3 Sep 2015 15:13:33 -0400 Subject: [PATCH 02/17] Upgrade to lucene 5.4-snapshot r1701068 --- .../main/java/org/elasticsearch/Version.java | 2 +- .../index/analysis/AnalysisModule.java | 3 +- .../analysis/DecimalDigitFilterFactory.java | 42 +++++++++++++++++++ .../elasticsearch/index/shard/IndexShard.java | 37 +--------------- .../analysis/PreBuiltTokenFilters.java | 8 ++++ .../index/analysis/AnalysisFactoryTests.java | 3 ++ .../licenses/antlr-runtime-3.5.jar.sha1 | 1 - .../licenses/antlr-runtime-LICENSE.txt | 7 ---- .../licenses/antlr-runtime-NOTICE.txt | 1 - .../licenses/antlr4-runtime-4.5.jar.sha1 | 1 + .../licenses/antlr4-runtime-LICENSE.txt | 26 ++++++++++++ .../licenses/antlr4-runtime-NOTICE.txt | 0 distribution/licenses/asm-4.1.jar.sha1 | 1 - distribution/licenses/asm-5.0.4.jar.sha1 | 1 + .../licenses/asm-commons-4.1.jar.sha1 | 1 - .../licenses/asm-commons-5.0.4.jar.sha1 | 1 + .../lucene-analyzers-common-5.3.0.jar.sha1 | 1 - ...ers-common-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-backward-codecs-5.3.0.jar.sha1 | 1 - ...ard-codecs-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-core-5.3.0.jar.sha1 | 1 - ...ucene-core-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-expressions-5.3.0.jar.sha1 | 1 - ...xpressions-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-grouping-5.3.0.jar.sha1 | 1 - ...e-grouping-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-highlighter-5.3.0.jar.sha1 | 1 - ...ighlighter-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-join-5.3.0.jar.sha1 | 1 - ...ucene-join-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-memory-5.3.0.jar.sha1 | 1 - ...ene-memory-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-misc-5.3.0.jar.sha1 | 1 - ...ucene-misc-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-queries-5.3.0.jar.sha1 | 1 - ...ne-queries-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-queryparser-5.3.0.jar.sha1 | 1 - ...ueryparser-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-sandbox-5.3.0.jar.sha1 | 1 - ...ne-sandbox-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-spatial-5.3.0.jar.sha1 | 1 - ...ne-spatial-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../licenses/lucene-spatial3d-5.3.0.jar.sha1 | 1 - ...-spatial3d-5.4.0-snapshot-1701068.jar.sha1 | 1 + 
.../licenses/lucene-suggest-5.3.0.jar.sha1 | 1 - ...ne-suggest-5.4.0-snapshot-1701068.jar.sha1 | 1 + docs/reference/analysis/tokenfilters.asciidoc | 2 + .../decimal-digit-tokenfilter.asciidoc | 4 ++ .../lucene-analyzers-icu-5.3.0.jar.sha1 | 1 - ...lyzers-icu-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-analyzers-kuromoji-5.3.0.jar.sha1 | 1 - ...s-kuromoji-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-analyzers-phonetic-5.3.0.jar.sha1 | 1 - ...s-phonetic-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-analyzers-smartcn-5.3.0.jar.sha1 | 1 - ...rs-smartcn-5.4.0-snapshot-1701068.jar.sha1 | 1 + .../lucene-analyzers-stempel-5.3.0.jar.sha1 | 1 - ...rs-stempel-5.4.0-snapshot-1701068.jar.sha1 | 1 + pom.xml | 9 +++- 59 files changed, 118 insertions(+), 72 deletions(-) create mode 100644 core/src/main/java/org/elasticsearch/index/analysis/DecimalDigitFilterFactory.java delete mode 100644 distribution/licenses/antlr-runtime-3.5.jar.sha1 delete mode 100644 distribution/licenses/antlr-runtime-LICENSE.txt delete mode 100644 distribution/licenses/antlr-runtime-NOTICE.txt create mode 100644 distribution/licenses/antlr4-runtime-4.5.jar.sha1 create mode 100644 distribution/licenses/antlr4-runtime-LICENSE.txt create mode 100644 distribution/licenses/antlr4-runtime-NOTICE.txt delete mode 100644 distribution/licenses/asm-4.1.jar.sha1 create mode 100644 distribution/licenses/asm-5.0.4.jar.sha1 delete mode 100644 distribution/licenses/asm-commons-4.1.jar.sha1 create mode 100644 distribution/licenses/asm-commons-5.0.4.jar.sha1 delete mode 100644 distribution/licenses/lucene-analyzers-common-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-backward-codecs-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-core-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-core-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-expressions-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-expressions-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-grouping-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-grouping-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-highlighter-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-highlighter-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-join-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-join-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-memory-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-memory-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-misc-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-misc-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-queries-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-queries-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-queryparser-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-queryparser-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-sandbox-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-sandbox-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial-5.3.0.jar.sha1 
create mode 100644 distribution/licenses/lucene-spatial-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-spatial3d-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 distribution/licenses/lucene-suggest-5.3.0.jar.sha1 create mode 100644 distribution/licenses/lucene-suggest-5.4.0-snapshot-1701068.jar.sha1 create mode 100644 docs/reference/analysis/tokenfilters/decimal-digit-tokenfilter.asciidoc delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-5.3.0.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.3.0.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.3.0.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.3.0.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1701068.jar.sha1 delete mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.3.0.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1701068.jar.sha1 diff --git a/core/src/main/java/org/elasticsearch/Version.java b/core/src/main/java/org/elasticsearch/Version.java index 494ae24222b..7286e979dbe 100644 --- a/core/src/main/java/org/elasticsearch/Version.java +++ b/core/src/main/java/org/elasticsearch/Version.java @@ -260,7 +260,7 @@ public class Version { public static final int V_2_1_0_ID = 2010099; public static final Version V_2_1_0 = new Version(V_2_1_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0); public static final int V_3_0_0_ID = 3000099; - public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0); + public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0); public static final Version CURRENT = V_3_0_0; static { diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java index 393f1c96317..6ebe7134544 100644 --- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java +++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisModule.java @@ -453,8 +453,7 @@ public class AnalysisModule extends AbstractModule { tokenFiltersBindings.processTokenFilter("apostrophe", ApostropheFilterFactory.class); tokenFiltersBindings.processTokenFilter("classic", ClassicFilterFactory.class); - - + tokenFiltersBindings.processTokenFilter("decimal_digit", DecimalDigitFilterFactory.class); } @Override diff --git a/core/src/main/java/org/elasticsearch/index/analysis/DecimalDigitFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/DecimalDigitFilterFactory.java new file mode 100644 index 00000000000..9ec8408d597 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/index/analysis/DecimalDigitFilterFactory.java @@ -0,0 +1,42 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.core.DecimalDigitFilter; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; + +/** + * Factory for {@link DecimalDigitFilter} + */ +public final class DecimalDigitFilterFactory extends AbstractTokenFilterFactory { + + @Inject + public DecimalDigitFilterFactory(Index index, Settings indexSettings, String name, Settings settings) { + super(index, indexSettings, name, settings); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + return new DecimalDigitFilter(tokenStream); + } +} diff --git a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 0a804bf8694..2101ed03e63 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/core/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -254,42 +254,7 @@ public class IndexShard extends AbstractIndexShardComponent { if (indexSettings.getAsBoolean(IndexCacheModule.QUERY_CACHE_EVERYTHING, false)) { cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE; } else { - assert Version.CURRENT.luceneVersion == org.apache.lucene.util.Version.LUCENE_5_3_0; - // TODO: remove this hack in Lucene 5.4, use UsageTrackingQueryCachingPolicy directly - // See https://issues.apache.org/jira/browse/LUCENE-6748 - // cachingPolicy = new UsageTrackingQueryCachingPolicy(); - - final QueryCachingPolicy wrapped = new UsageTrackingQueryCachingPolicy(); - cachingPolicy = new QueryCachingPolicy() { - - @Override - public boolean shouldCache(Query query, LeafReaderContext context) throws IOException { - if (query instanceof MatchAllDocsQuery - // MatchNoDocsQuery currently rewrites to a BooleanQuery, - // but who knows, it might get its own Weight one day - || query instanceof MatchNoDocsQuery) { - return false; - } - if (query instanceof BooleanQuery) { - BooleanQuery bq = (BooleanQuery) query; - if (bq.clauses().isEmpty()) { - return false; - } - } - if (query instanceof DisjunctionMaxQuery) { - DisjunctionMaxQuery dmq = (DisjunctionMaxQuery) query; - if (dmq.getDisjuncts().isEmpty()) { - return false; - } - } - return wrapped.shouldCache(query, context); - } - - @Override - public void onUse(Query query) { - wrapped.onUse(query); - } - }; + cachingPolicy = new UsageTrackingQueryCachingPolicy(); } this.engineConfig = newEngineConfig(translogConfig, cachingPolicy); this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId); diff --git a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java index 
07a9688fbc4..70d1a25b43e 100644 --- a/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java +++ b/core/src/main/java/org/elasticsearch/indices/analysis/PreBuiltTokenFilters.java @@ -26,6 +26,7 @@ import org.apache.lucene.analysis.cjk.CJKBigramFilter; import org.apache.lucene.analysis.cjk.CJKWidthFilter; import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter; import org.apache.lucene.analysis.commongrams.CommonGramsFilter; +import org.apache.lucene.analysis.core.DecimalDigitFilter; import org.apache.lucene.analysis.core.LowerCaseFilter; import org.apache.lucene.analysis.core.Lucene43StopFilter; import org.apache.lucene.analysis.core.StopAnalyzer; @@ -395,6 +396,13 @@ public enum PreBuiltTokenFilters { return new CJKWidthFilter(tokenStream); } }, + + DECIMAL_DIGIT(CachingStrategy.ONE) { + @Override + public TokenStream create(TokenStream tokenStream, Version version) { + return new DecimalDigitFilter(tokenStream); + } + }, CJK_BIGRAM(CachingStrategy.ONE) { @Override diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java index a79da27a524..b2df4a9d416 100644 --- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java +++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisFactoryTests.java @@ -87,6 +87,7 @@ public class AnalysisFactoryTests extends ESTestCase { put("commongrams", CommonGramsTokenFilterFactory.class); put("commongramsquery", CommonGramsTokenFilterFactory.class); put("czechstem", CzechStemTokenFilterFactory.class); + put("decimaldigit", DecimalDigitFilterFactory.class); put("delimitedpayload", DelimitedPayloadTokenFilterFactory.class); put("dictionarycompoundword", DictionaryCompoundWordTokenFilterFactory.class); put("edgengram", EdgeNGramTokenFilterFactory.class); @@ -176,6 +177,8 @@ public class AnalysisFactoryTests extends ESTestCase { put("tokenoffsetpayload", Void.class); // puts the type into the payload put("typeaspayload", Void.class); + // fingerprint + put("fingerprint", Void.class); }}; public void testTokenFilters() { diff --git a/distribution/licenses/antlr-runtime-3.5.jar.sha1 b/distribution/licenses/antlr-runtime-3.5.jar.sha1 deleted file mode 100644 index d90b777a4a7..00000000000 --- a/distribution/licenses/antlr-runtime-3.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0baa82bff19059401e90e1b90020beb9c96305d7 diff --git a/distribution/licenses/antlr-runtime-LICENSE.txt b/distribution/licenses/antlr-runtime-LICENSE.txt deleted file mode 100644 index a6e3ad08507..00000000000 --- a/distribution/licenses/antlr-runtime-LICENSE.txt +++ /dev/null @@ -1,7 +0,0 @@ -Copyright (c) 2012 Terence Parr and Sam Harwell -All rights reserved. -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: -Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. -Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. -Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/distribution/licenses/antlr-runtime-NOTICE.txt b/distribution/licenses/antlr-runtime-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3f..00000000000 --- a/distribution/licenses/antlr-runtime-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/distribution/licenses/antlr4-runtime-4.5.jar.sha1 b/distribution/licenses/antlr4-runtime-4.5.jar.sha1 new file mode 100644 index 00000000000..5299c19c73b --- /dev/null +++ b/distribution/licenses/antlr4-runtime-4.5.jar.sha1 @@ -0,0 +1 @@ +29e48af049f17dd89153b83a7ad5d01b3b4bcdda diff --git a/distribution/licenses/antlr4-runtime-LICENSE.txt b/distribution/licenses/antlr4-runtime-LICENSE.txt new file mode 100644 index 00000000000..95d0a2554f6 --- /dev/null +++ b/distribution/licenses/antlr4-runtime-LICENSE.txt @@ -0,0 +1,26 @@ +[The "BSD license"] +Copyright (c) 2015 Terence Parr, Sam Harwell +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + + 1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + 3. The name of the author may not be used to endorse or promote products + derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
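The `decimal_digit` filter registered earlier in this patch wraps Lucene's DecimalDigitFilter, which maps any Unicode decimal digit to its ASCII 0-9 equivalent. A minimal standalone sketch of the filter in action, assuming the Lucene 5.4 analysis API used by the new factory (the demo class and input string are illustrative, not part of this patch):

import java.io.StringReader;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.core.DecimalDigitFilter;
import org.apache.lucene.analysis.core.WhitespaceTokenizer;
import org.apache.lucene.analysis.tokenattributes.CharTermAttribute;

public class DecimalDigitDemo {
    public static void main(String[] args) throws Exception {
        // Tokenize on whitespace, then fold Unicode digits to 0-9.
        WhitespaceTokenizer tokenizer = new WhitespaceTokenizer();
        tokenizer.setReader(new StringReader("\u0663\u0661\u0664 foo42")); // Arabic-Indic digits for 314
        try (TokenStream stream = new DecimalDigitFilter(tokenizer)) {
            CharTermAttribute term = stream.addAttribute(CharTermAttribute.class);
            stream.reset();
            while (stream.incrementToken()) {
                System.out.println(term); // prints "314", then "foo42"
            }
            stream.end();
        }
    }
}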
diff --git a/distribution/licenses/antlr4-runtime-NOTICE.txt b/distribution/licenses/antlr4-runtime-NOTICE.txt new file mode 100644 index 00000000000..e69de29bb2d diff --git a/distribution/licenses/asm-4.1.jar.sha1 b/distribution/licenses/asm-4.1.jar.sha1 deleted file mode 100644 index fca9878081d..00000000000 --- a/distribution/licenses/asm-4.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ad568238ee36a820bd6c6806807e8a14ea34684d diff --git a/distribution/licenses/asm-5.0.4.jar.sha1 b/distribution/licenses/asm-5.0.4.jar.sha1 new file mode 100644 index 00000000000..9223dba380f --- /dev/null +++ b/distribution/licenses/asm-5.0.4.jar.sha1 @@ -0,0 +1 @@ +0da08b8cce7bbf903602a25a3a163ae252435795 diff --git a/distribution/licenses/asm-commons-4.1.jar.sha1 b/distribution/licenses/asm-commons-4.1.jar.sha1 deleted file mode 100644 index 2b534751bf1..00000000000 --- a/distribution/licenses/asm-commons-4.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f8b86f4ee6e02082f63a658e00eb5506821253c6 diff --git a/distribution/licenses/asm-commons-5.0.4.jar.sha1 b/distribution/licenses/asm-commons-5.0.4.jar.sha1 new file mode 100644 index 00000000000..94fe0cd92c9 --- /dev/null +++ b/distribution/licenses/asm-commons-5.0.4.jar.sha1 @@ -0,0 +1 @@ +5a556786086c23cd689a0328f8519db93821c04c diff --git a/distribution/licenses/lucene-analyzers-common-5.3.0.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.3.0.jar.sha1 deleted file mode 100644 index 4d79ce9d9e2..00000000000 --- a/distribution/licenses/lucene-analyzers-common-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1502beac94cf437baff848ffbbb8f76172befa6b diff --git a/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..3eff5a77688 --- /dev/null +++ b/distribution/licenses/lucene-analyzers-common-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +35fca29c4597a15ce4d4eb7dc73a517038684a27 diff --git a/distribution/licenses/lucene-backward-codecs-5.3.0.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.3.0.jar.sha1 deleted file mode 100644 index 9b802fb5e04..00000000000 --- a/distribution/licenses/lucene-backward-codecs-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f654901e55fe56bdbe4be202767296929c2f8d9e diff --git a/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..6fe76092653 --- /dev/null +++ b/distribution/licenses/lucene-backward-codecs-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +e4769b5c05fad8339f4eaf9cfa9e850cbeaa10ec diff --git a/distribution/licenses/lucene-core-5.3.0.jar.sha1 b/distribution/licenses/lucene-core-5.3.0.jar.sha1 deleted file mode 100644 index 9765d65189b..00000000000 --- a/distribution/licenses/lucene-core-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9e12bb7c39e964a544e3a23b9c8ffa9599d38f10 diff --git a/distribution/licenses/lucene-core-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-core-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..fb5b2dd2e54 --- /dev/null +++ b/distribution/licenses/lucene-core-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +3bbab9d7a395bd0b6cc8b5bee26287105c8659e8 diff --git a/distribution/licenses/lucene-expressions-5.3.0.jar.sha1 b/distribution/licenses/lucene-expressions-5.3.0.jar.sha1 deleted file mode 100644 index 232b4f3ff34..00000000000 --- 
a/distribution/licenses/lucene-expressions-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dc6f5e352f787d71a7896025c0cdd0eb665b2985 diff --git a/distribution/licenses/lucene-expressions-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-expressions-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..000759a2842 --- /dev/null +++ b/distribution/licenses/lucene-expressions-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +d60476428e7d3d8a68fe491d42dbda0d4024f589 diff --git a/distribution/licenses/lucene-grouping-5.3.0.jar.sha1 b/distribution/licenses/lucene-grouping-5.3.0.jar.sha1 deleted file mode 100644 index 82b09e61a01..00000000000 --- a/distribution/licenses/lucene-grouping-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2d27582889b8676dfed6880a920148f3e32c9b42 diff --git a/distribution/licenses/lucene-grouping-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..1688910396e --- /dev/null +++ b/distribution/licenses/lucene-grouping-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +8618da3f400f0a4b140f196bbbecb0686fe754db diff --git a/distribution/licenses/lucene-highlighter-5.3.0.jar.sha1 b/distribution/licenses/lucene-highlighter-5.3.0.jar.sha1 deleted file mode 100644 index 406bc446a08..00000000000 --- a/distribution/licenses/lucene-highlighter-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3b9d67c0f93e107a9ad8c179505df56a85e3f027 diff --git a/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..5b6a48e527b --- /dev/null +++ b/distribution/licenses/lucene-highlighter-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +c7db4fe5587d08ab23b253c622566462aab6796a diff --git a/distribution/licenses/lucene-join-5.3.0.jar.sha1 b/distribution/licenses/lucene-join-5.3.0.jar.sha1 deleted file mode 100644 index fbf636c2649..00000000000 --- a/distribution/licenses/lucene-join-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -95ddffcd889af106136704ecb7dc7173b3e9cdb3 diff --git a/distribution/licenses/lucene-join-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-join-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..9dbe3284449 --- /dev/null +++ b/distribution/licenses/lucene-join-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +f9c8d435d3e1d553b0dca05c99b1fa377568eed0 diff --git a/distribution/licenses/lucene-memory-5.3.0.jar.sha1 b/distribution/licenses/lucene-memory-5.3.0.jar.sha1 deleted file mode 100644 index 0f39068c29b..00000000000 --- a/distribution/licenses/lucene-memory-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -44f50f425264b4b17e6781ba07bdc80b4d36bb65 diff --git a/distribution/licenses/lucene-memory-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-memory-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..1c0f2f57d56 --- /dev/null +++ b/distribution/licenses/lucene-memory-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +571dd2e4363f0a0410de04b3f3f4bbf66e782c31 diff --git a/distribution/licenses/lucene-misc-5.3.0.jar.sha1 b/distribution/licenses/lucene-misc-5.3.0.jar.sha1 deleted file mode 100644 index 50949e57486..00000000000 --- a/distribution/licenses/lucene-misc-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d03ce6d1bb8ab3926b3acc717418c474a49ade69 diff --git a/distribution/licenses/lucene-misc-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-misc-5.4.0-snapshot-1701068.jar.sha1 new 
file mode 100644 index 00000000000..0d44482a658 --- /dev/null +++ b/distribution/licenses/lucene-misc-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +423264f839aace3b9159a0dd54f56c250458fd46 diff --git a/distribution/licenses/lucene-queries-5.3.0.jar.sha1 b/distribution/licenses/lucene-queries-5.3.0.jar.sha1 deleted file mode 100644 index 51486ac5c70..00000000000 --- a/distribution/licenses/lucene-queries-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a0e8ff0bb90fd762800afdd434fdf769b1f9ac28 diff --git a/distribution/licenses/lucene-queries-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-queries-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..a4391c68e60 --- /dev/null +++ b/distribution/licenses/lucene-queries-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +872530eeac156faa0989eb87145bbef74a72e66f diff --git a/distribution/licenses/lucene-queryparser-5.3.0.jar.sha1 b/distribution/licenses/lucene-queryparser-5.3.0.jar.sha1 deleted file mode 100644 index f542844d20b..00000000000 --- a/distribution/licenses/lucene-queryparser-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2c5e08580316c90b56a52e3cb686e1cf69db3f9e diff --git a/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..29c85e8917f --- /dev/null +++ b/distribution/licenses/lucene-queryparser-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +6f6b6a024ca96017252efea6d2fc7dc97c69febd diff --git a/distribution/licenses/lucene-sandbox-5.3.0.jar.sha1 b/distribution/licenses/lucene-sandbox-5.3.0.jar.sha1 deleted file mode 100644 index b1bf9194e10..00000000000 --- a/distribution/licenses/lucene-sandbox-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -152da54a3b1ea6e3e8648d767616a51857b66a8e diff --git a/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..dbc3ec8c8fe --- /dev/null +++ b/distribution/licenses/lucene-sandbox-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +a6f5a5c84b165ebde104cdcde46fa9c5948650f0 diff --git a/distribution/licenses/lucene-spatial-5.3.0.jar.sha1 b/distribution/licenses/lucene-spatial-5.3.0.jar.sha1 deleted file mode 100644 index 6499667fa8e..00000000000 --- a/distribution/licenses/lucene-spatial-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -6d57880a0950416035112f4fcc725854c011b081 diff --git a/distribution/licenses/lucene-spatial-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..1e2c1dc7176 --- /dev/null +++ b/distribution/licenses/lucene-spatial-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +a305601f93b6cb02444816c96276a74f91ac7d40 diff --git a/distribution/licenses/lucene-spatial3d-5.3.0.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.3.0.jar.sha1 deleted file mode 100644 index d1dd3219632..00000000000 --- a/distribution/licenses/lucene-spatial3d-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -23cfd7c19ead7b6fc6b2921f9c490ad3d043770d diff --git a/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..ab2be14bc16 --- /dev/null +++ b/distribution/licenses/lucene-spatial3d-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +ef1fcaa5b6663dd9382719a1ad40d86fc962c690 diff --git 
a/distribution/licenses/lucene-suggest-5.3.0.jar.sha1 b/distribution/licenses/lucene-suggest-5.3.0.jar.sha1 deleted file mode 100644 index dc59343223c..00000000000 --- a/distribution/licenses/lucene-suggest-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a155fc16a20b11205f99603950025522b173edc9 diff --git a/distribution/licenses/lucene-suggest-5.4.0-snapshot-1701068.jar.sha1 b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..dd69c53dbc1 --- /dev/null +++ b/distribution/licenses/lucene-suggest-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +3698e0623f45e181d2ceead46e48a6dd8c2867dd diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc index ba2ea71c551..ba460465e49 100644 --- a/docs/reference/analysis/tokenfilters.asciidoc +++ b/docs/reference/analysis/tokenfilters.asciidoc @@ -84,3 +84,5 @@ include::tokenfilters/keep-types-tokenfilter.asciidoc[] include::tokenfilters/classic-tokenfilter.asciidoc[] include::tokenfilters/apostrophe-tokenfilter.asciidoc[] + +include::tokenfilters/decimal-digit-tokenfilter.asciidoc[] diff --git a/docs/reference/analysis/tokenfilters/decimal-digit-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/decimal-digit-tokenfilter.asciidoc new file mode 100644 index 00000000000..8dede54d0d2 --- /dev/null +++ b/docs/reference/analysis/tokenfilters/decimal-digit-tokenfilter.asciidoc @@ -0,0 +1,4 @@ +[[analysis-decimal-digit-tokenfilter]] +=== Decimal Digit Token Filter + +The `decimal_digit` token filter folds unicode digits to `0-9` diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.3.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.3.0.jar.sha1 deleted file mode 100644 index 393ebc59ee0..00000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e6dd489db555ad84279732c5f189406d20b63c84 diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..c1a1ec208f5 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +b7f57ef60f302b30e88196d4f0d11f789c5cfabd diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.3.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.3.0.jar.sha1 deleted file mode 100644 index b9e01cd40fd..00000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b3e67473646e3869fcdeb4a3151ab597b957fbf2 diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..60ea23d0f56 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +5d1023fc3f28a42357d44d3a330ac0df1df4bf42 diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.3.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.3.0.jar.sha1 deleted file mode 100644 index 1008732a647..00000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -471f3ee15053413e75c5c24a978494a6d4984240 diff --git 
a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..92243aee3ee --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +654c3e345ffdd74605582d1320c51c1c550a5cca diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.3.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.3.0.jar.sha1 deleted file mode 100644 index 34377b92824..00000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e37000b73d34ba33dda26f46893b09ba275c5294 diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..a9159ebb32a --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +80c09e367abf2ad936c86cf74a16ae2b4e805b81 diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.3.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.3.0.jar.sha1 deleted file mode 100644 index 6c2857f65b1..00000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.3.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fcc4bf8ccbda52435d13525d7cfc66cecf5c5125 diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1701068.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1701068.jar.sha1 new file mode 100644 index 00000000000..390511f227b --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-5.4.0-snapshot-1701068.jar.sha1 @@ -0,0 +1 @@ +7c6ae4fc7e8e1d39c155068fea67b7fabb12c444 diff --git a/pom.xml b/pom.xml index ab032e367c5..5f8690a816a 100644 --- a/pom.xml +++ b/pom.xml @@ -41,8 +41,9 @@ 1.8 - 5.3.0 - 5.3.0 + 5.4.0 + 1701068 + 5.4.0-snapshot-${lucene.snapshot.revision} 2.1.16 2.5.3 1.6.2 @@ -137,6 +138,10 @@ Sonatype OSS Snapshots https://oss.sonatype.org/content/repositories/snapshots/ + + Lucene snapshots + https://download.elasticsearch.org/lucenesnapshots/${lucene.snapshot.revision} + From 529ad7fe79f4f2de4b84e408d488d2e347241e76 Mon Sep 17 00:00:00 2001 From: Robert Muir Date: Thu, 3 Sep 2015 16:01:14 -0400 Subject: [PATCH 03/17] Remove broadcast address check. This was supposed to just help the user, in case they misconfigured something. Broadcast is an ipv4 only thing, the only way you can really detect its a broadcast address, is to look and see if an interface has that address as its broadcast address. But we cannot trust that container interfaces won't have a crazy setup... 
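For reference, detecting a broadcast address requires exactly the per-interface comparison described above; a standalone sketch of that approach using the standard java.net API, mirroring the check this patch deletes from NetworkService:

import java.net.InetAddress;
import java.net.InterfaceAddress;
import java.net.NetworkInterface;
import java.util.Collections;

public final class BroadcastCheck {
    // An address can only be recognized as a broadcast address by comparing
    // it against each local interface's configured broadcast address --
    // which is why the check breaks down on containers whose interfaces
    // are configured in unexpected ways.
    static boolean isLocalBroadcast(InetAddress address) throws Exception {
        for (NetworkInterface nic : Collections.list(NetworkInterface.getNetworkInterfaces())) {
            for (InterfaceAddress intf : nic.getInterfaceAddresses()) {
                if (address.equals(intf.getBroadcast())) {
                    return true;
                }
            }
        }
        return false;
    }
}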
Closes #13327 --- .../common/network/NetworkService.java | 18 --------- .../common/network/NetworkServiceTests.java | 40 ------------------- 2 files changed, 58 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java index c05e61e3969..8eff70e7bd8 100644 --- a/core/src/main/java/org/elasticsearch/common/network/NetworkService.java +++ b/core/src/main/java/org/elasticsearch/common/network/NetworkService.java @@ -27,8 +27,6 @@ import org.elasticsearch.common.unit.TimeValue; import java.io.IOException; import java.net.InetAddress; -import java.net.InterfaceAddress; -import java.net.NetworkInterface; import java.net.UnknownHostException; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; @@ -120,14 +118,6 @@ public class NetworkService extends AbstractComponent { if (address.isMulticastAddress()) { throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) + "} is invalid: multicast address"); } - // check if its broadcast: flat out mistake - for (NetworkInterface nic : NetworkUtils.getInterfaces()) { - for (InterfaceAddress intf : nic.getInterfaceAddresses()) { - if (address.equals(intf.getBroadcast())) { - throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) + "} is invalid: broadcast address"); - } - } - } } } return addresses; @@ -161,14 +151,6 @@ public class NetworkService extends AbstractComponent { if (address.isMulticastAddress()) { throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is invalid: multicast address"); } - // check if its broadcast: flat out mistake - for (NetworkInterface nic : NetworkUtils.getInterfaces()) { - for (InterfaceAddress intf : nic.getInterfaceAddresses()) { - if (address.equals(intf.getBroadcast())) { - throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is invalid: broadcast address"); - } - } - } // wildcard address, probably set by network.host if (address.isAnyLocalAddress()) { InetAddress old = address; diff --git a/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java b/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java index fcbd383c912..0a772907a8c 100644 --- a/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java +++ b/core/src/test/java/org/elasticsearch/common/network/NetworkServiceTests.java @@ -23,11 +23,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import java.net.InetAddress; -import java.net.InterfaceAddress; -import java.net.NetworkInterface; -import java.util.ArrayList; -import java.util.Collections; -import java.util.List; /** * Tests for network service... 
try to keep them safe depending upon configuration @@ -87,41 +82,6 @@ public class NetworkServiceTests extends ESTestCase { } } - /** - * ensure exception if we bind/publish to broadcast address - */ - public void testBindPublishBroadcast() throws Exception { - NetworkService service = new NetworkService(Settings.EMPTY); - // collect any broadcast addresses on the system - List addresses = new ArrayList<>(); - for (NetworkInterface nic : Collections.list(NetworkInterface.getNetworkInterfaces())) { - for (InterfaceAddress intf : nic.getInterfaceAddresses()) { - InetAddress address = intf.getBroadcast(); - if (address != null) { - addresses.add(address); - } - } - } - // can easily happen (ipv6-only, localhost-only, ...) - assumeTrue("test requires broadcast addresses configured", addresses.size() > 0); - // make sure we fail on each one - for (InetAddress address : addresses) { - try { - service.resolveBindHostAddress(NetworkAddress.formatAddress(address)); - fail("should have hit exception for broadcast address: " + address); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("invalid: broadcast")); - } - - try { - service.resolvePublishHostAddress(NetworkAddress.formatAddress(address)); - fail("should have hit exception for broadcast address: " + address); - } catch (IllegalArgumentException e) { - assertTrue(e.getMessage().contains("invalid: broadcast")); - } - } - } - /** * ensure specifying wildcard ipv4 address will bind to all interfaces */ From 6319f383c47626b2325046a4d5545b5ea7a680af Mon Sep 17 00:00:00 2001 From: David Pilato Date: Thu, 3 Sep 2015 20:12:47 +0200 Subject: [PATCH 04/17] Remove assembly files in core Closes #13317. --- core/src/main/assemblies/common-bin.xml | 95 ------------------- core/src/packaging/common/scripts/postrm | 116 ----------------------- 2 files changed, 211 deletions(-) delete mode 100644 core/src/main/assemblies/common-bin.xml delete mode 100644 core/src/packaging/common/scripts/postrm diff --git a/core/src/main/assemblies/common-bin.xml b/core/src/main/assemblies/common-bin.xml deleted file mode 100644 index a67d5842e34..00000000000 --- a/core/src/main/assemblies/common-bin.xml +++ /dev/null @@ -1,95 +0,0 @@ - - - - - /lib - true - - org.apache.lucene:lucene* - log4j:log4j - log4j:apache-log4j-extras - net.java.dev.jna:jna - com.spatial4j:spatial4j - com.vividsolutions:jts - org.codehaus.groovy:groovy-all - com.google.guava:guava - com.carrotsearch:hppc - com.fasterxml.jackson.core:jackson-core - com.fasterxml.jackson.dataformat:jackson-dataformat-smile - com.fasterxml.jackson.dataformat:jackson-dataformat-yaml - com.fasterxml.jackson.dataformat:jackson-dataformat-cbor - joda-time:joda-time - org.joda:joda-convert - io.netty:netty - com.ning:compress-lzf - com.github.spullara.mustache.java:compiler - com.tdunning:t-digest - commons-cli:commons-cli - org.hdrhistogram:HdrHistogram - - - - /lib - false - - org.elasticsearch:elasticsearch - - - - - - config - config - - * - - - - true - bin - bin - dos - - elasticsearch.in.bat - elasticsearch.bat - plugin.bat - service.bat - - - - false - bin - bin - - *.exe - - - - true - bin - bin - 0755 - 0755 - unix - - elasticsearch.in.sh - elasticsearch - plugin - - - - - - README.textile - - - - LICENSE.txt - - - - NOTICE.txt - - - - diff --git a/core/src/packaging/common/scripts/postrm b/core/src/packaging/common/scripts/postrm deleted file mode 100644 index 021509245fb..00000000000 --- a/core/src/packaging/common/scripts/postrm +++ /dev/null @@ -1,116 +0,0 @@ 
-${packaging.scripts.header} - -# -# This script is executed in the post-removal phase -# -# On Debian, -# $1=remove : indicates a removal -# $1=purge : indicates an upgrade -# -# On RedHat, -# $1=1 : indicates an new install -# $1=2 : indicates an upgrade - - - -SOURCE_ENV_FILE=true -REMOVE_DIRS=false -REMOVE_SERVICE=false -REMOVE_USER_AND_GROUP=false - -case "$1" in - - # Debian #################################################### - remove) - REMOVE_DIRS=true - REMOVE_SERVICE=true - ;; - - purge) - REMOVE_USER_AND_GROUP=true - SOURCE_ENV_FILE=false - ;; - failed-upgrade|abort-install|abort-upgrade|disappear|upgrade|disappear) - ;; - - # RedHat #################################################### - 0) - REMOVE_DIRS=true - REMOVE_SERVICE=true - REMOVE_USER_AND_GROUP=true - ;; - 2) - # If $1=1 this is an upgrade - IS_UPGRADE=true - ;; - - *) - echo "post remove script called with unknown argument \`$1'" >&2 - exit 1 - ;; -esac - -# Sets the default values for elasticsearch variables used in this script -ES_USER="${packaging.elasticsearch.user}" -ES_GROUP="${packaging.elasticsearch.group}" -LOG_DIR="${packaging.elasticsearch.log.dir}" -PLUGINS_DIR="${packaging.elasticsearch.plugins.dir}" -PID_DIR="${packaging.elasticsearch.pid.dir}" -DATA_DIR="${packaging.elasticsearch.data.dir}" - -# Source the default env file -if [ "$SOURCE_ENV_FILE" = "true" ]; then - ES_ENV_FILE="${packaging.env.file}" - if [ -f "$ES_ENV_FILE" ]; then - . "$ES_ENV_FILE" - fi -fi - -if [ "$REMOVE_SERVICE" = "true" ]; then - if command -v systemctl >/dev/null; then - systemctl --no-reload disable elasticsearch.service > /dev/null 2>&1 || true - fi - - if command -v chkconfig >/dev/null; then - chkconfig --del elasticsearch 2> /dev/null || true - fi - - if command -v update-rc.d >/dev/null; then - update-rc.d elasticsearch remove >/dev/null || true - fi -fi - -if [ "$REMOVE_DIRS" = "true" ]; then - - if [ -d "$LOG_DIR" ]; then - echo -n "Deleting log directory..." - rm -rf "$LOG_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$LOG_DIR]" - fi - - if [ -d "$PLUGINS_DIR" ]; then - echo -n "Deleting plugins directory..." - rm -rf "$PLUGINS_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$PLUGINS_DIR]" - fi - - if [ -d "$PID_DIR" ]; then - echo -n "Deleting PID directory..." - rm -rf "$PID_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$PID_DIR]" - fi - - # Delete the data directory if and only if empty - if [ -d "$DATA_DIR" ]; then - rmdir --ignore-fail-on-non-empty "$DATA_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$DATA_DIR]" - fi -fi - -if [ "$REMOVE_USER_AND_GROUP" = "true" ]; then - if id "$ES_USER" > /dev/null 2>&1 ; then - userdel "$ES_USER" - fi - - if getent group "$ES_GROUP" > /dev/null 2>&1 ; then - groupdel "$ES_GROUP" - fi -fi - -${packaging.scripts.footer} From b2c584b21d53d0ef6726845418ac53264ca77fcc Mon Sep 17 00:00:00 2001 From: David Pilato Date: Thu, 3 Sep 2015 20:16:37 +0200 Subject: [PATCH 05/17] Remove jmeter files Follow up for #13317. 
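The JMeter plans removed below each drive a simple indexing workload: PUT a {name, age} document to /test/person/{id} on localhost:9200. A hypothetical plain-JDK equivalent of one such sampler, for anyone who still needs the workload after the .jmx files are gone (class name, document id, and field values are illustrative):

import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.charset.StandardCharsets;

public class IndexPerson {
    public static void main(String[] args) throws Exception {
        // One indexing request, matching the sampler's URL and body shape.
        URL url = new URL("http://localhost:9200/test/person/1");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("PUT");
        conn.setDoOutput(true);
        byte[] body = "{ \"name\" : \"person1\", \"age\" : 42 }".getBytes(StandardCharsets.UTF_8);
        try (OutputStream out = conn.getOutputStream()) {
            out.write(body);
        }
        System.out.println("HTTP " + conn.getResponseCode());
    }
}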
--- .../src/test/resources/jmeter/index-count.jmx | 240 ------------------ core/src/test/resources/jmeter/index-get.jmx | 211 --------------- .../test/resources/jmeter/index-search.jmx | 240 ------------------ core/src/test/resources/jmeter/index.jmx | 210 --------------- .../src/test/resources/jmeter/ping-single.jmx | 210 --------------- 5 files changed, 1111 deletions(-) delete mode 100644 core/src/test/resources/jmeter/index-count.jmx delete mode 100644 core/src/test/resources/jmeter/index-get.jmx delete mode 100644 core/src/test/resources/jmeter/index-search.jmx delete mode 100644 core/src/test/resources/jmeter/index.jmx delete mode 100644 core/src/test/resources/jmeter/ping-single.jmx diff --git a/core/src/test/resources/jmeter/index-count.jmx b/core/src/test/resources/jmeter/index-count.jmx deleted file mode 100644 index 09a563f3ecd..00000000000 --- a/core/src/test/resources/jmeter/index-count.jmx +++ /dev/null @@ -1,240 +0,0 @@ - - - - - - false - false - - - - - - - - - - host - localhost - = - - - numberOfThreads - 20 - = - - - numberOfLoops - 10000 - = - - - - - - - false - ${numberOfLoops} - - ${numberOfThreads} - 0 - 1260471148000 - 1260471148000 - false - continue - - - - - - 10000 - 0 - - true - - personId - - - - - personAge - - 1 - 50 - - true - - - - - - - ${host} - 9200 - - - - - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - - - false - { term : { age : ${personAge} } } - = - true - - - - - - - - - - /test/person/_count - POST - false - false - true - false - - - - false - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - diff --git a/core/src/test/resources/jmeter/index-get.jmx b/core/src/test/resources/jmeter/index-get.jmx deleted file mode 100644 index c8d7914f60b..00000000000 --- a/core/src/test/resources/jmeter/index-get.jmx +++ /dev/null @@ -1,211 +0,0 @@ - - - - - - false - false - - - - - - - - - - host - localhost - = - - - numberOfThreads - 20 - = - - - numberOfLoops - 10000 - = - - - - - - - false - ${numberOfLoops} - - ${numberOfThreads} - 0 - 1260471148000 - 1260471148000 - false - continue - - - - - - 10000 - 0 - - true - - personId - - - - - personAge - - 1 - 50 - - true - - - - - - - ${host} - 9200 - - - - - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - - - - - - - - - /test/person/${personId} - GET - false - false - true - false - - - - false - - - - - - Assertion.response_code - false - 2 - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - 
- - - - - - - diff --git a/core/src/test/resources/jmeter/index-search.jmx b/core/src/test/resources/jmeter/index-search.jmx deleted file mode 100644 index dc7428545c6..00000000000 --- a/core/src/test/resources/jmeter/index-search.jmx +++ /dev/null @@ -1,240 +0,0 @@ - - - - - - false - false - - - - - - - - - - host - localhost - = - - - numberOfThreads - 20 - = - - - numberOfLoops - 10000 - = - - - - - - - false - ${numberOfLoops} - - ${numberOfThreads} - 0 - 1260471148000 - 1260471148000 - false - continue - - - - - - 10000 - 0 - - true - - personId - - - - - personAge - - 1 - 50 - - true - - - - - - - ${host} - 9200 - - - - - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - - - false - { query : { term : { age : ${personAge} } } } - = - true - - - - - - - - - - /test/person/_search - POST - false - false - true - false - - - - false - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - diff --git a/core/src/test/resources/jmeter/index.jmx b/core/src/test/resources/jmeter/index.jmx deleted file mode 100644 index 64a6849239c..00000000000 --- a/core/src/test/resources/jmeter/index.jmx +++ /dev/null @@ -1,210 +0,0 @@ - - - - - - false - false - - - - - - - - - - host - localhost - = - - - numberOfThreads - 20 - = - - - numberOfLoops - 10000 - = - - - - - - - false - ${numberOfLoops} - - ${numberOfThreads} - 0 - 1260471148000 - 1260471148000 - false - continue - - - - - - 10000 - 0 - - true - - personId - - - - - personAge - - 1 - 50 - - true - - - - - - - ${host} - 9200 - - - - - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - diff --git a/core/src/test/resources/jmeter/ping-single.jmx b/core/src/test/resources/jmeter/ping-single.jmx deleted file mode 100644 index 64a6849239c..00000000000 --- a/core/src/test/resources/jmeter/ping-single.jmx +++ /dev/null @@ -1,210 +0,0 @@ - - - - - - false - false - - - - - - - - - - host - localhost - = - - - numberOfThreads - 20 - = - - - numberOfLoops - 10000 - = - - - - - - - false - ${numberOfLoops} - - ${numberOfThreads} - 0 - 1260471148000 - 1260471148000 - false - continue - - - - - - 10000 - 0 - - true - - personId - - - - - personAge - - 1 - 50 - - true - - - - - - - ${host} - 9200 - - - - - - - - - - - - 
false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - - - false - { name : "person${personId}", age : ${personAge} } - = - true - - - - - - - - - - /test/person/${personId} - PUT - false - false - true - false - - - - false - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - - false - - saveConfig - - - true - true - true - - true - true - true - true - false - true - true - false - false - true - false - false - false - false - false - 0 - true - - - - - - - - From 17460ae92d432948e79d3e880c082b6990170e39 Mon Sep 17 00:00:00 2001 From: Nicholas Knize Date: Wed, 2 Sep 2015 14:03:39 -0500 Subject: [PATCH 06/17] Refactor ignore_malformed and coerce from geo_point field type to mapper This commit moves ignore_malformed and coerce options from the GeoPointFieldType to the Builder in GeoPointFieldMapper. This makes these options consistent with other types in 2.0. --- .../index/mapper/geo/GeoPointFieldMapper.java | 204 ++++++++++-------- .../mapper/geo/GeoPointFieldMapperTests.java | 8 +- .../mapper/geo/GeoPointFieldTypeTests.java | 14 -- 3 files changed, 119 insertions(+), 107 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java index c958998fcf6..08974cbe29c 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapper.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.IndexOptions; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; import org.elasticsearch.Version; +import org.elasticsearch.common.Explicit; import org.elasticsearch.common.Strings; import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoHashUtils; @@ -43,6 +44,8 @@ import org.elasticsearch.index.mapper.FieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperParsingException; +import org.elasticsearch.index.mapper.MergeMappingException; +import org.elasticsearch.index.mapper.MergeResult; import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper; @@ -96,8 +99,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper public static final boolean ENABLE_GEOHASH_PREFIX = false; public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION; - public static final boolean IGNORE_MALFORMED = false; - public static final boolean COERCE = false; + public static final Explicit IGNORE_MALFORMED = new Explicit(false, false); + public static final Explicit COERCE = new Explicit(false, false); public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType(); @@ -123,11 +126,45 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper private int geoHashPrecision = Defaults.GEO_HASH_PRECISION; + private Boolean ignoreMalformed; + + private Boolean coerce; + public Builder(String name) { super(name, Defaults.FIELD_TYPE); this.builder = this; } + public Builder ignoreMalformed(boolean 
ignoreMalformed) {
+            this.ignoreMalformed = ignoreMalformed;
+            return builder;
+        }
+
+        protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
+            if (ignoreMalformed != null) {
+                return new Explicit<>(ignoreMalformed, true);
+            }
+            if (context.indexSettings() != null) {
+                return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
+            }
+            return Defaults.IGNORE_MALFORMED;
+        }
+
+        public Builder coerce(boolean coerce) {
+            this.coerce = coerce;
+            return builder;
+        }
+
+        protected Explicit<Boolean> coerce(BuilderContext context) {
+            if (coerce != null) {
+                return new Explicit<>(coerce, true);
+            }
+            if (context.indexSettings() != null) {
+                return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
+            }
+            return Defaults.COERCE;
+        }
+
         @Override
         public GeoPointFieldType fieldType() {
             return (GeoPointFieldType)fieldType;
@@ -208,7 +245,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             fieldType.setHasDocValues(false);
             defaultFieldType.setHasDocValues(false);
             return new GeoPointFieldMapper(name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
-                    latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context));
+                    latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), coerce(context));
         }
     }
@@ -220,71 +257,58 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         parseField(builder, name, node, parserContext);
         for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
             Map.Entry<String, Object> entry = iterator.next();
-            String fieldName = Strings.toUnderscoreCase(entry.getKey());
-            Object fieldNode = entry.getValue();
-            if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
-                builder.multiFieldPathType(parsePathType(name, fieldNode.toString()));
+            String propName = Strings.toUnderscoreCase(entry.getKey());
+            Object propNode = entry.getValue();
+            if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
+                builder.multiFieldPathType(parsePathType(name, propNode.toString()));
                 iterator.remove();
-            } else if (fieldName.equals("lat_lon")) {
-                builder.enableLatLon(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (propName.equals("lat_lon")) {
+                builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
                 iterator.remove();
-            } else if (fieldName.equals("geohash")) {
-                builder.enableGeoHash(XContentMapValues.nodeBooleanValue(fieldNode));
+            } else if (propName.equals("geohash")) {
+                builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
                 iterator.remove();
-            } else if (fieldName.equals("geohash_prefix")) {
-                builder.geohashPrefix(XContentMapValues.nodeBooleanValue(fieldNode));
-                if (XContentMapValues.nodeBooleanValue(fieldNode)) {
+            } else if (propName.equals("geohash_prefix")) {
+                builder.geohashPrefix(XContentMapValues.nodeBooleanValue(propNode));
+                if (XContentMapValues.nodeBooleanValue(propNode)) {
                     builder.enableGeoHash(true);
                 }
                 iterator.remove();
-            } else if (fieldName.equals("precision_step")) {
-                builder.precisionStep(XContentMapValues.nodeIntegerValue(fieldNode));
+            } else if (propName.equals("precision_step")) {
+                builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
                 iterator.remove();
-            } else if (fieldName.equals("geohash_precision")) {
-                if (fieldNode instanceof Integer) {
-
builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(fieldNode)); + } else if (propName.equals("geohash_precision")) { + if (propNode instanceof Integer) { + builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode)); } else { - builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString())); + builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString())); } iterator.remove(); - } else if (fieldName.equals(Names.IGNORE_MALFORMED)) { - if (builder.fieldType().coerce == false) { - builder.fieldType().ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode); - } + } else if (propName.equals(Names.IGNORE_MALFORMED)) { + builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode)); iterator.remove(); - } else if (indexCreatedBeforeV2_0 && fieldName.equals("validate")) { - if (builder.fieldType().ignoreMalformed == false) { - builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode); - } - iterator.remove(); - } else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lon")) { - if (builder.fieldType().ignoreMalformed() == false) { - builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode); - } + } else if (indexCreatedBeforeV2_0 && propName.equals("validate")) { + builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode)); iterator.remove(); - } else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lat")) { - if (builder.fieldType().ignoreMalformed == false) { - builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode); - } + } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) { + builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode)); iterator.remove(); - } else if (fieldName.equals(Names.COERCE)) { - builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode); - if (builder.fieldType().coerce == true) { - builder.fieldType().ignoreMalformed = true; - } + } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) { + builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode)); iterator.remove(); - } else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize")) { - builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode); + } else if (propName.equals(Names.COERCE)) { + builder.coerce(XContentMapValues.nodeBooleanValue(propNode)); iterator.remove(); - } else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lat")) { - builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode); + } else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) { + builder.coerce(XContentMapValues.nodeBooleanValue(propNode)); iterator.remove(); - } else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lon")) { - if (builder.fieldType().coerce == false) { - builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode); - } + } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) { + builder.coerce(XContentMapValues.nodeBooleanValue(propNode)); iterator.remove(); - } else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) { + } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) { + builder.coerce(XContentMapValues.nodeBooleanValue(propNode)); + iterator.remove(); + } else if (parseMultiField(builder, name, parserContext, propName, propNode)) { iterator.remove(); } } @@ -300,8 +324,6 @@ public class GeoPointFieldMapper extends FieldMapper 
implements ArrayValueMapper private MappedFieldType latFieldType; private MappedFieldType lonFieldType; - private boolean ignoreMalformed = false; - private boolean coerce = false; public GeoPointFieldType() {} @@ -312,8 +334,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper this.geohashPrefixEnabled = ref.geohashPrefixEnabled; this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified - this.coerce = ref.coerce; - this.ignoreMalformed = ref.ignoreMalformed; } @Override @@ -327,8 +347,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper GeoPointFieldType that = (GeoPointFieldType) o; return geohashPrecision == that.geohashPrecision && geohashPrefixEnabled == that.geohashPrefixEnabled && - coerce == that.coerce && - ignoreMalformed == that.ignoreMalformed && java.util.Objects.equals(geohashFieldType, that.geohashFieldType) && java.util.Objects.equals(latFieldType, that.latFieldType) && java.util.Objects.equals(lonFieldType, that.lonFieldType); @@ -337,7 +355,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper @Override public int hashCode() { return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType, - lonFieldType, coerce, ignoreMalformed); + lonFieldType); } @Override @@ -365,12 +383,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) { conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]"); } - if (ignoreMalformed() != other.ignoreMalformed()) { - conflicts.add("mapper [" + names().fullName() + "] has different [ignore_malformed]"); - } - if (coerce() != other.coerce()) { - conflicts.add("mapper [" + names().fullName() + "] has different [coerce]"); - } } public boolean isGeohashEnabled() { @@ -414,24 +426,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper this.lonFieldType = lonFieldType; } - public boolean coerce() { - return this.coerce; - } - - public void setCoerce(boolean coerce) { - checkIfFrozen(); - this.coerce = coerce; - } - - public boolean ignoreMalformed() { - return this.ignoreMalformed; - } - - public void setIgnoreMalformed(boolean ignoreMalformed) { - checkIfFrozen(); - this.ignoreMalformed = ignoreMalformed; - } - @Override public GeoPoint value(Object value) { if (value instanceof GeoPoint) { @@ -575,14 +569,20 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper private final StringFieldMapper geohashMapper; + protected Explicit ignoreMalformed; + + protected Explicit coerce; + public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper, - MultiFields multiFields) { + MultiFields multiFields, Explicit ignoreMalformed, Explicit coerce) { super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null); this.pathType = pathType; this.latMapper = latMapper; this.lonMapper = lonMapper; this.geohashMapper = geohashMapper; + this.ignoreMalformed = ignoreMalformed; + this.coerce = coerce; } @Override @@ -595,6 +595,30 @@ public class GeoPointFieldMapper extends FieldMapper implements 
ArrayValueMapper return (GeoPointFieldType) super.fieldType(); } + @Override + public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException { + super.merge(mergeWith, mergeResult); + if (!this.getClass().equals(mergeWith.getClass())) { + return; + } + + GeoPointFieldMapper gpfmMergeWith = (GeoPointFieldMapper) mergeWith; + if (gpfmMergeWith.coerce.explicit()) { + if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) { + mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]"); + } + } + + if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) { + if (gpfmMergeWith.ignoreMalformed.explicit()) { + this.ignoreMalformed = gpfmMergeWith.ignoreMalformed; + } + if (gpfmMergeWith.coerce.explicit()) { + this.coerce = gpfmMergeWith.coerce; + } + } + } + @Override protected void parseCreateField(ParseContext context, List fields) throws IOException { throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called"); @@ -671,16 +695,18 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper } private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException { - if (fieldType().ignoreMalformed == false) { + boolean validPoint = false; + if (coerce.value() == false && ignoreMalformed.value() == false) { if (point.lat() > 90.0 || point.lat() < -90.0) { throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name()); } if (point.lon() > 180.0 || point.lon() < -180) { throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name()); } + validPoint = true; } - if (fieldType().coerce) { + if (coerce.value() == true && validPoint == false) { // by setting coerce to false we are assuming all geopoints are already in a valid coordinate system // thus this extra step can be skipped // LUCENE WATCH: This will be folded back into Lucene's GeoPointField @@ -747,11 +773,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) { builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep()); } - if (includeDefaults || fieldType().coerce != Defaults.COERCE) { - builder.field(Names.COERCE, fieldType().coerce); + if (includeDefaults || coerce.explicit()) { + builder.field(Names.COERCE, coerce.value()); } - if (includeDefaults || fieldType().ignoreMalformed != Defaults.IGNORE_MALFORMED) { - builder.field(Names.IGNORE_MALFORMED, fieldType().ignoreMalformed); + if (includeDefaults || ignoreMalformed.explicit()) { + builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value()); } } diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java index 4d61ecf397e..da2151fb279 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldMapperTests.java @@ -614,13 +614,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { public void testGeoPointMapperMerge() throws Exception { String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") 
.startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("ignore_malformed", true).endObject().endObject() + .field("coerce", true).endObject().endObject() .endObject().endObject().string(); DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser(); DocumentMapper stage1 = parser.parse(stage1Mapping); String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).field("geohash", true) - .field("ignore_malformed", false).endObject().endObject() + .field("coerce", false).endObject().endObject() .endObject().endObject().string(); DocumentMapper stage2 = parser.parse(stage2Mapping); @@ -629,12 +629,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase { assertThat(mergeResult.buildConflicts().length, equalTo(2)); // todo better way of checking conflict? assertThat("mapper [point] has different [lat_lon]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts())))); - assertThat("mapper [point] has different [ignore_malformed]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts())))); + assertThat("mapper [point] has different [coerce]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts())))); // correct mapping and ensure no failures stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true) - .field("ignore_malformed", true).endObject().endObject() + .field("coerce", true).endObject().endObject() .endObject().endObject().string(); stage2 = parser.parse(stage2Mapping); mergeResult = stage1.merge(stage2.mapping(), false, false); diff --git a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java index b6ebeb90acf..e3b18831bba 100644 --- a/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java +++ b/core/src/test/java/org/elasticsearch/index/mapper/geo/GeoPointFieldTypeTests.java @@ -44,19 +44,5 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase { ((GeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType()); } }); - addModifier(new Modifier("ignore_malformed", false, true) { - @Override - public void modify(MappedFieldType ft) { - GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft; - gft.setIgnoreMalformed(!gft.ignoreMalformed()); - } - }); - addModifier(new Modifier("coerce", false, true) { - @Override - public void modify(MappedFieldType ft) { - GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft; - gft.setCoerce(!gft.coerce()); - } - }); } } From 1016734b4c2950caf9ba6ef81d103787d1b1fc91 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 28 Aug 2015 12:23:19 -0400 Subject: [PATCH 07/17] Aggregations: Add percentiles_bucket pipeline aggregations This pipeline will calculate percentiles over a set of sibling buckets. This is an exact implementation, meaning it needs to cache a copy of the series in memory and sort it to determine the percentiles. 
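In sketch form, that reduce step amounts to something like the following
(illustrative Java only; the class and method names, and the Math.min clamp
on the last index, are this sketch's assumptions rather than the shipped
code):

    import java.util.Arrays;

    final class PercentilesSketch {
        // Nearest-rank percentiles over a cached, sorted copy of the series.
        static double[] compute(double[] bucketValues, double[] percents) {
            double[] sorted = bucketValues.clone();
            Arrays.sort(sorted); // the whole series is held and sorted in memory
            double[] out = new double[percents.length];
            for (int i = 0; i < percents.length; i++) {
                if (sorted.length == 0) {
                    out[i] = Double.NaN; // no buckets: every percentile is NaN
                } else {
                    // index proportional to the requested percent (nearest rank)
                    int index = (int) ((percents[i] / 100.0) * sorted.length);
                    out[i] = sorted[Math.min(index, sorted.length - 1)];
                }
            }
            return out;
        }
    }

From the Java API the aggregation is built with
percentilesBucket("name").setBucketsPaths("histo>_count").percents(...),
as the PercentilesBucketIT tests added below exercise.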
This comes with a few limitations. To limit how much data is serialized
between nodes, only the requested percentiles are calculated (unlike the
TDigest version, which lets the Java API ask for any percentile after the
fact). The series also has to be held in memory, which adds some overhead
when it is very large.
---
 .../elasticsearch/search/SearchModule.java    |   7 +-
 .../pipeline/PipelineAggregatorBuilders.java  |   5 +
 .../bucketmetrics/BucketMetricsBuilder.java   |   2 +-
 .../bucketmetrics/BucketMetricsParser.java    |   3 +-
 .../percentile/InternalPercentilesBucket.java | 163 +++++
 .../percentile/PercentilesBucket.java         |  25 +
 .../percentile/PercentilesBucketBuilder.java  |  49 ++
 .../percentile/PercentilesBucketParser.java   |  67 ++
 .../PercentilesBucketPipelineAggregator.java  | 155 +++++
 .../pipeline/PercentilesBucketIT.java         | 625 ++++++++++++++++++
 docs/reference/aggregations/pipeline.asciidoc |   1 +
 .../percentiles-bucket-aggregation.asciidoc   | 121 ++++
 12 files changed, 1218 insertions(+), 5 deletions(-)
 create mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java
 create mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java
 create mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java
 create mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java
 create mode 100644 core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java
 create mode 100644 core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java
 create mode 100644 docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc

diff --git a/core/src/main/java/org/elasticsearch/search/SearchModule.java b/core/src/main/java/org/elasticsearch/search/SearchModule.java
index 49acd1163fc..b78fe06f8e9 100644
--- a/core/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/core/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -19,7 +19,6 @@ package org.elasticsearch.search;

-import org.elasticsearch.common.Classes;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.multibindings.Multibinder;
 import org.elasticsearch.common.settings.Settings;
@@ -110,6 +109,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucke
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketParser;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregator;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketParser;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketParser;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptParser;
@@ -143,8 +144,6 @@ import org.elasticsearch.search.highlight.HighlightPhase;
 import org.elasticsearch.search.highlight.Highlighter;
 import
org.elasticsearch.search.highlight.Highlighters; import org.elasticsearch.search.query.QueryPhase; -import org.elasticsearch.search.suggest.SuggestParseElement; -import org.elasticsearch.search.suggest.SuggestPhase; import org.elasticsearch.search.suggest.Suggester; import org.elasticsearch.search.suggest.Suggesters; @@ -301,6 +300,7 @@ public class SearchModule extends AbstractModule { multibinderPipelineAggParser.addBinding().to(MinBucketParser.class); multibinderPipelineAggParser.addBinding().to(AvgBucketParser.class); multibinderPipelineAggParser.addBinding().to(SumBucketParser.class); + multibinderPipelineAggParser.addBinding().to(PercentilesBucketParser.class); multibinderPipelineAggParser.addBinding().to(MovAvgParser.class); multibinderPipelineAggParser.addBinding().to(CumulativeSumParser.class); multibinderPipelineAggParser.addBinding().to(BucketScriptParser.class); @@ -393,6 +393,7 @@ public class SearchModule extends AbstractModule { MinBucketPipelineAggregator.registerStreams(); AvgBucketPipelineAggregator.registerStreams(); SumBucketPipelineAggregator.registerStreams(); + PercentilesBucketPipelineAggregator.registerStreams(); MovAvgPipelineAggregator.registerStreams(); CumulativeSumPipelineAggregator.registerStreams(); BucketScriptPipelineAggregator.registerStreams(); diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java index 7fd1fe03308..96df702072d 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/PipelineAggregatorBuilders.java @@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.pipeline; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketBuilder; import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder; import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptBuilder; @@ -55,6 +56,10 @@ public final class PipelineAggregatorBuilders { return new SumBucketBuilder(name); } + public static final PercentilesBucketBuilder percentilesBucket(String name) { + return new PercentilesBucketBuilder(name); + } + public static final MovAvgBuilder movingAvg(String name) { return new MovAvgBuilder(name); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java index 755655755e7..1e5dd46eca6 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsBuilder.java @@ -61,7 +61,7 @@ public abstract class BucketMetricsBuilder> ex return builder; } - protected void doInternalXContent(XContentBuilder builder, Params params) { + protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { } } 
\ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java index 533b6996cda..80b4c981d12 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java @@ -105,7 +105,8 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser { protected abstract PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter); - protected boolean doParse(String pipelineAggregatorName, String currentFieldName, Token token, XContentParser parser, SearchContext context) { + protected boolean doParse(String pipelineAggregatorName, String currentFieldName, Token token, + XContentParser parser, SearchContext context) throws IOException { return false; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java new file mode 100644 index 00000000000..10b1481e8d6 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/InternalPercentilesBucket.java @@ -0,0 +1,163 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; + +import com.google.common.collect.UnmodifiableIterator; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.AggregationStreams; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation; +import org.elasticsearch.search.aggregations.metrics.max.InternalMax; +import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentile; +import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.format.ValueFormatter; +import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Iterator; +import java.util.List; +import java.util.Map; + +public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket { + + public final static Type TYPE = new Type("percentiles_bucket"); + + public final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() { + @Override + public InternalPercentilesBucket readResult(StreamInput in) throws IOException { + InternalPercentilesBucket result = new InternalPercentilesBucket(); + result.readFrom(in); + return result; + } + }; + + public static void registerStreams() { + AggregationStreams.registerStream(STREAM, TYPE.stream()); + } + + private double[] percentiles; + private double[] percents; + + protected InternalPercentilesBucket() { + } // for serialization + + public InternalPercentilesBucket(String name, double[] percents, double[] percentiles, + ValueFormatter formatter, List pipelineAggregators, + Map metaData) { + super(name, pipelineAggregators, metaData); + this.valueFormatter = formatter; + this.percentiles = percentiles; + this.percents = percents; + } + + @Override + public double percentile(double percent) throws IllegalArgumentException { + int index = Arrays.binarySearch(percents, percent); + if (index < 0) { + throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" + + " one of the computed percentiles. 
Available keys are: " + Arrays.toString(percents)); + } + return percentiles[index]; + } + + @Override + public String percentileAsString(double percent) { + return valueFormatter.format(percentile(percent)); + } + + @Override + public Iterator iterator() { + return new Iter(percents, percentiles); + } + + @Override + public double value(String name) { + return percentile(Double.parseDouble(name)); + } + + @Override + public Type type() { + return TYPE; + } + + @Override + public InternalMax doReduce(List aggregations, ReduceContext reduceContext) { + throw new UnsupportedOperationException("Not supported"); + } + + @Override + protected void doReadFrom(StreamInput in) throws IOException { + valueFormatter = ValueFormatterStreams.readOptional(in); + percentiles = in.readDoubleArray(); + percents = in.readDoubleArray(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + ValueFormatterStreams.writeOptional(valueFormatter, out); + out.writeDoubleArray(percentiles); + out.writeDoubleArray(percents); + } + + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.startObject("values"); + for (double percent : percents) { + double value = percentile(percent); + boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value)); + String key = String.valueOf(percent); + builder.field(key, hasValue ? value : null); + if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) { + builder.field(key + "_as_string", percentileAsString(percent)); + } + } + builder.endObject(); + return builder; + } + + public static class Iter extends UnmodifiableIterator { + + private final double[] percents; + private final double[] percentiles; + private int i; + + public Iter(double[] percents, double[] percentiles) { + this.percents = percents; + this.percentiles = percentiles; + i = 0; + } + + @Override + public boolean hasNext() { + return i < percents.length; + } + + @Override + public Percentile next() { + final Percentile next = new InternalPercentile(percents[i], percentiles[i]); + ++i; + return next; + } + } +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java new file mode 100644 index 00000000000..64424ac5abc --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucket.java @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; + +import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles; + +public interface PercentilesBucket extends Percentiles { +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java new file mode 100644 index 00000000000..53e90808f8a --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketBuilder.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; + +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder; + +import java.io.IOException; + +public class PercentilesBucketBuilder extends BucketMetricsBuilder { + + Double[] percents; + + public PercentilesBucketBuilder(String name) { + super(name, PercentilesBucketPipelineAggregator.TYPE.name()); + } + + public PercentilesBucketBuilder percents(Double[] percents) { + this.percents = percents; + return this; + } + + @Override + protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException { + if (percents != null) { + builder.field(PercentilesBucketParser.PERCENTS.getPreferredName(), percents); + } + } + + +} + diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java new file mode 100644 index 00000000000..01a428873c2 --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; + +import com.google.common.primitives.Doubles; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; +import org.elasticsearch.search.aggregations.support.format.ValueFormatter; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; + + +public class PercentilesBucketParser extends BucketMetricsParser { + + public static final ParseField PERCENTS = new ParseField("percents"); + double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 }; + + @Override + public String type() { + return PercentilesBucketPipelineAggregator.TYPE.name(); + } + + @Override + protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, + ValueFormatter formatter) { + return new PercentilesBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter, percents); + } + + @Override + protected boolean doParse(String pipelineAggregatorName, String currentFieldName, + XContentParser.Token token, XContentParser parser, SearchContext context) throws IOException { + if (context.parseFieldMatcher().match(currentFieldName, PERCENTS)) { + + List parsedPercents = new ArrayList<>(); + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + parsedPercents.add(parser.doubleValue()); + } + percents = Doubles.toArray(parsedPercents); + return true; + } + return false; + } +} diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java new file mode 100644 index 00000000000..92e8b01f43b --- /dev/null +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketPipelineAggregator.java @@ -0,0 +1,155 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregation.Type; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator; +import org.elasticsearch.search.aggregations.support.format.ValueFormatter; + +import java.io.IOException; +import java.util.*; + +import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; + +public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator { + + public final static Type TYPE = new Type("percentiles_bucket"); + + public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() { + @Override + public PercentilesBucketPipelineAggregator readResult(StreamInput in) throws IOException { + PercentilesBucketPipelineAggregator result = new PercentilesBucketPipelineAggregator(); + result.readFrom(in); + return result; + } + }; + + public static void registerStreams() { + PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream()); + InternalPercentilesBucket.registerStreams(); + } + + private double[] percents; + private List data; + + private PercentilesBucketPipelineAggregator() { + } + + protected PercentilesBucketPipelineAggregator(String name, double[] percents, String[] bucketsPaths, GapPolicy gapPolicy, + ValueFormatter formatter, Map metaData) { + super(name, bucketsPaths, gapPolicy, formatter, metaData); + this.percents = percents; + } + + @Override + public Type type() { + return TYPE; + } + + @Override + protected void preCollection() { + data = new ArrayList<>(1024); + } + + @Override + protected void collectBucketValue(String bucketKey, Double bucketValue) { + data.add(bucketValue); + } + + @Override + protected InternalAggregation buildAggregation(List pipelineAggregators, Map metadata) { + + // Perform the sorting and percentile collection now that all the data + // has been collected. + Collections.sort(data); + + double[] percentiles = new double[percents.length]; + if (data.size() == 0) { + for (int i = 0; i < percents.length; i++) { + percentiles[i] = Double.NaN; + } + } else { + for (int i = 0; i < percents.length; i++) { + int index = (int)((percents[i] / 100.0) * data.size()); + percentiles[i] = data.get(index); + } + } + + // todo need postCollection() to clean up temp sorted data? 
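+        // (data was sorted above, so each requested percent maps to the element
+        // at nearest-rank index (percent / 100.0) * data.size(); an empty series
+        // reports NaN for every requested percentile)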
+ + return new InternalPercentilesBucket(name(), percents, percentiles, formatter, pipelineAggregators, metadata); + } + + @Override + public void doReadFrom(StreamInput in) throws IOException { + super.doReadFrom(in); + percents = in.readDoubleArray(); + } + + @Override + public void doWriteTo(StreamOutput out) throws IOException { + super.doWriteTo(out); + out.writeDoubleArray(percents); + } + + public static class Factory extends PipelineAggregatorFactory { + + private final ValueFormatter formatter; + private final GapPolicy gapPolicy; + private final double[] percents; + + public Factory(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter, double[] percents) { + super(name, TYPE.name(), bucketsPaths); + this.gapPolicy = gapPolicy; + this.formatter = formatter; + this.percents = percents; + } + + @Override + protected PipelineAggregator createInternal(Map metaData) throws IOException { + return new PercentilesBucketPipelineAggregator(name, percents, bucketsPaths, gapPolicy, formatter, metaData); + } + + @Override + public void doValidate(AggregatorFactory parent, AggregatorFactory[] aggFactories, + List pipelineAggregatorFactories) { + if (bucketsPaths.length != 1) { + throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName() + + " must contain a single entry for aggregation [" + name + "]"); + } + + for (Double p : percents) { + if (p == null || p < 0.0 || p > 100.0) { + throw new IllegalStateException(PercentilesBucketParser.PERCENTS.getPreferredName() + + " must only contain non-null doubles from 0.0-100.0 inclusive"); + } + } + } + + } + +} diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java new file mode 100644 index 00000000000..507939e6858 --- /dev/null +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -0,0 +1,625 @@ +package org.elasticsearch.search.aggregations.pipeline; + +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.search.SearchParseException; +import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile; +import org.elasticsearch.search.aggregations.metrics.sum.Sum; +import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucket; +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Test; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; +import static org.elasticsearch.search.aggregations.AggregationBuilders.histogram; +import static org.elasticsearch.search.aggregations.AggregationBuilders.sum; +import static org.elasticsearch.search.aggregations.AggregationBuilders.terms; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.percentilesBucket; +import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.sumBucket; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.core.IsNull.notNullValue; + +@ESIntegTestCase.SuiteScopeTestCase +public class PercentilesBucketIT extends ESIntegTestCase { + + private static final String SINGLE_VALUED_FIELD_NAME = "l_value"; + private static final Double[] PERCENTS = {1.0, 25.0, 50.0, 75.0, 99.0}; + static int numDocs; + static int interval; + static int minRandomValue; + static int maxRandomValue; + static int numValueBuckets; + static long[] valueCounts; + + @Override + public void setupSuiteScopeCluster() throws Exception { + createIndex("idx"); + createIndex("idx_unmapped"); + + numDocs = randomIntBetween(6, 20); + interval = randomIntBetween(2, 5); + + minRandomValue = 0; + maxRandomValue = 20; + + numValueBuckets = ((maxRandomValue - minRandomValue) / interval) + 1; + valueCounts = new long[numValueBuckets]; + + List builders = new ArrayList<>(); + + for (int i = 0; i < numDocs; i++) { + int fieldValue = randomIntBetween(minRandomValue, maxRandomValue); + builders.add(client().prepareIndex("idx", "type").setSource( + jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, fieldValue).field("tag", "tag" + (i % interval)) + .endObject())); + final int bucket = (fieldValue / interval); // + (fieldValue < 0 ? 
-1 : 0) - (minRandomValue / interval - 1); + valueCounts[bucket]++; + } + + assertAcked(prepareCreate("empty_bucket_idx").addMapping("type", SINGLE_VALUED_FIELD_NAME, "type=integer")); + for (int i = 0; i < 2; i++) { + builders.add(client().prepareIndex("empty_bucket_idx", "type", "" + i).setSource( + jsonBuilder().startObject().field(SINGLE_VALUED_FIELD_NAME, i * 2).endObject())); + } + indexRandom(true, builders); + ensureSearchable(); + } + + @Test + public void testDocCount_topLevel() throws Exception { + SearchResponse response = client().prepareSearch("idx") + .addAggregation(histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue)) + .addAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("histo>_count") + .percents(PERCENTS)).execute().actionGet(); + + assertSearchResponse(response); + + Histogram histo = response.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + assertThat(buckets.size(), equalTo(numValueBuckets)); + + double[] values = new double[numValueBuckets]; + for (int i = 0; i < numValueBuckets; ++i) { + Histogram.Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) i * interval)); + assertThat(bucket.getDocCount(), equalTo(valueCounts[i])); + values[i] = bucket.getDocCount(); + } + + Arrays.sort(values); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + for (Double p : PERCENTS) { + double expected = values[(int)((p / 100) * values.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + + } + + @Test + public void testDocCount_asSubAgg() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + terms("terms") + .field("tag") + .order(Terms.Order.term(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue)) + .subAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("histo>_count") + .percents(PERCENTS))).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List termsBuckets = terms.getBuckets(); + assertThat(termsBuckets.size(), equalTo(interval)); + + for (int i = 0; i < interval; ++i) { + Terms.Bucket termsBucket = termsBuckets.get(i); + assertThat(termsBucket, notNullValue()); + assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); + + Histogram histo = termsBucket.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + + double[] values = new double[numValueBuckets]; + for (int j = 0; j < numValueBuckets; ++j) { + Histogram.Bucket bucket = buckets.get(j); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); + values[j] = bucket.getDocCount(); + } + + Arrays.sort(values); + + PercentilesBucket percentilesBucketValue = 
termsBucket.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + for (Double p : PERCENTS) { + double expected = values[(int)((p / 100) * values.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + } + } + + @Test + public void testMetric_topLevel() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("terms>sum") + .percents(PERCENTS)).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(interval)); + + double[] values = new double[interval]; + for (int i = 0; i < interval; ++i) { + Terms.Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval))); + assertThat(bucket.getDocCount(), greaterThan(0l)); + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + values[i] = sum.value(); + } + + Arrays.sort(values); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + for (Double p : PERCENTS) { + double expected = values[(int)((p / 100) * values.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + } + + @Test + public void testMetric_topLevelDefaultPercents() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("terms>sum")).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(interval)); + + double[] values = new double[interval]; + for (int i = 0; i < interval; ++i) { + Terms.Bucket bucket = buckets.get(i); + assertThat(bucket, notNullValue()); + assertThat((String) bucket.getKey(), equalTo("tag" + (i % interval))); + assertThat(bucket.getDocCount(), greaterThan(0l)); + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + values[i] = sum.value(); + } + + Arrays.sort(values); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + for (Percentile p : percentilesBucketValue) { + double expected = values[(int)((p.getPercent() / 100) * values.length)]; + assertThat(percentilesBucketValue.percentile(p.getPercent()), equalTo(expected)); + assertThat(p.getValue(), equalTo(expected)); + } + } + + @Test + public void testMetric_asSubAgg() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + terms("terms") + 
.field("tag") + .order(Terms.Order.term(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .subAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("histo>sum") + .percents(PERCENTS))).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List termsBuckets = terms.getBuckets(); + assertThat(termsBuckets.size(), equalTo(interval)); + + for (int i = 0; i < interval; ++i) { + Terms.Bucket termsBucket = termsBuckets.get(i); + assertThat(termsBucket, notNullValue()); + assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); + + Histogram histo = termsBucket.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + + List values = new ArrayList<>(numValueBuckets); + for (int j = 0; j < numValueBuckets; ++j) { + Histogram.Bucket bucket = buckets.get(j); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); + if (bucket.getDocCount() != 0) { + Sum sum = bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + values.add(sum.value()); + } + } + + Collections.sort(values); + + PercentilesBucket percentilesBucketValue = termsBucket.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + for (Double p : PERCENTS) { + double expected = values.get((int) ((p / 100) * values.size())); + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + } + } + + @Test + public void testMetric_asSubAggWithInsertZeros() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + terms("terms") + .field("tag") + .order(Terms.Order.term(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue) + .subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .subAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("histo>sum") + .gapPolicy(BucketHelpers.GapPolicy.INSERT_ZEROS) + .percents(PERCENTS))) + .execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List termsBuckets = terms.getBuckets(); + assertThat(termsBuckets.size(), equalTo(interval)); + + for (int i = 0; i < interval; ++i) { + Terms.Bucket termsBucket = termsBuckets.get(i); + assertThat(termsBucket, notNullValue()); + assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); + + Histogram histo = termsBucket.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List buckets = histo.getBuckets(); + + double[] values = new double[numValueBuckets]; + for (int j = 0; j < numValueBuckets; ++j) { + Histogram.Bucket bucket = buckets.get(j); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); + Sum sum = 
bucket.getAggregations().get("sum"); + assertThat(sum, notNullValue()); + + values[j] = sum.value(); + } + + Arrays.sort(values); + + PercentilesBucket percentilesBucketValue = termsBucket.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + for (Double p : PERCENTS) { + double expected = values[(int)((p / 100) * values.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + } + } + + @Test + public void testNoBuckets() throws Exception { + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("terms>sum") + .percents(PERCENTS)).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List<Terms.Bucket> buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(0)); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + for (Double p : PERCENTS) { + assertThat(percentilesBucketValue.percentile(p), equalTo(Double.NaN)); + } + } + + @Test + public void testWrongPercents() throws Exception { + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").exclude("tag.*").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("terms>sum") + .percents(PERCENTS)).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List<Terms.Bucket> buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(0)); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentiles_bucket")); + + try { + percentilesBucketValue.percentile(2.0); + fail("2.0 was not a valid percent, should have thrown exception"); + } catch (IllegalArgumentException exception) { + // All good + } + } + + @Test + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/13179") + public void testBadPercents() throws Exception { + Double[] badPercents = {-1.0, 110.0}; + + try { + SearchResponse response = client().prepareSearch("idx") + .addAggregation(terms("terms").field("tag").subAggregation(sum("sum").field(SINGLE_VALUED_FIELD_NAME))) + .addAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("terms>sum") + .percents(badPercents)).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List<Terms.Bucket> buckets = terms.getBuckets(); + assertThat(buckets.size(), equalTo(0)); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); + + fail("Illegal percents were provided but no exception was thrown."); + } catch
(SearchPhaseExecutionException exception) { + // All good + } + + } + + @Test + public void testBadPercents_asSubAgg() throws Exception { + Double[] badPercents = {-1.0, 110.0}; + + try { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + terms("terms") + .field("tag") + .order(Terms.Order.term(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue)) + .subAggregation(percentilesBucket("percentiles_bucket") + .setBucketsPaths("histo>_count") + .percents(badPercents))).execute().actionGet(); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentiles_bucket"); + + fail("Illegal percents were provided but no exception was thrown."); + } catch (SearchPhaseExecutionException exception) { + // All good + } + + } + + @Test + public void testNested() throws Exception { + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + terms("terms") + .field("tag") + .order(Terms.Order.term(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue)) + .subAggregation(percentilesBucket("percentile_histo_bucket").setBucketsPaths("histo>_count"))) + .addAggregation(percentilesBucket("percentile_terms_bucket") + .setBucketsPaths("terms>percentile_histo_bucket.50") + .percents(PERCENTS)).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List<Terms.Bucket> termsBuckets = terms.getBuckets(); + assertThat(termsBuckets.size(), equalTo(interval)); + + double[] values = new double[termsBuckets.size()]; + for (int i = 0; i < interval; ++i) { + Terms.Bucket termsBucket = termsBuckets.get(i); + assertThat(termsBucket, notNullValue()); + assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); + + Histogram histo = termsBucket.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List<? extends Histogram.Bucket> buckets = histo.getBuckets(); + + double[] innerValues = new double[numValueBuckets]; + for (int j = 0; j < numValueBuckets; ++j) { + Histogram.Bucket bucket = buckets.get(j); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); + + innerValues[j] = bucket.getDocCount(); + } + Arrays.sort(innerValues); + + PercentilesBucket percentilesBucketValue = termsBucket.getAggregations().get("percentile_histo_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentile_histo_bucket")); + for (Double p : PERCENTS) { + double expected = innerValues[(int)((p / 100) * innerValues.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + values[i] = percentilesBucketValue.percentile(50.0); + } + + Arrays.sort(values); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentile_terms_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentile_terms_bucket")); + for (Double p : PERCENTS) { + double expected = values[(int)((p / 100) * values.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + } + + @Test + public void testNestedWithDecimal()
throws Exception { + Double[] percent = {99.9}; + SearchResponse response = client() + .prepareSearch("idx") + .addAggregation( + terms("terms") + .field("tag") + .order(Terms.Order.term(true)) + .subAggregation( + histogram("histo").field(SINGLE_VALUED_FIELD_NAME).interval(interval) + .extendedBounds((long) minRandomValue, (long) maxRandomValue)) + .subAggregation(percentilesBucket("percentile_histo_bucket") + .percents(percent) + .setBucketsPaths("histo>_count"))) + .addAggregation(percentilesBucket("percentile_terms_bucket") + .setBucketsPaths("terms>percentile_histo_bucket[99.9]") + .percents(percent)).execute().actionGet(); + + assertSearchResponse(response); + + Terms terms = response.getAggregations().get("terms"); + assertThat(terms, notNullValue()); + assertThat(terms.getName(), equalTo("terms")); + List<Terms.Bucket> termsBuckets = terms.getBuckets(); + assertThat(termsBuckets.size(), equalTo(interval)); + + double[] values = new double[termsBuckets.size()]; + for (int i = 0; i < interval; ++i) { + Terms.Bucket termsBucket = termsBuckets.get(i); + assertThat(termsBucket, notNullValue()); + assertThat((String) termsBucket.getKey(), equalTo("tag" + (i % interval))); + + Histogram histo = termsBucket.getAggregations().get("histo"); + assertThat(histo, notNullValue()); + assertThat(histo.getName(), equalTo("histo")); + List<? extends Histogram.Bucket> buckets = histo.getBuckets(); + + double[] innerValues = new double[numValueBuckets]; + for (int j = 0; j < numValueBuckets; ++j) { + Histogram.Bucket bucket = buckets.get(j); + assertThat(bucket, notNullValue()); + assertThat(((Number) bucket.getKey()).longValue(), equalTo((long) j * interval)); + + innerValues[j] = bucket.getDocCount(); + } + Arrays.sort(innerValues); + + PercentilesBucket percentilesBucketValue = termsBucket.getAggregations().get("percentile_histo_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentile_histo_bucket")); + for (Double p : percent) { + double expected = innerValues[(int)((p / 100) * innerValues.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + values[i] = percentilesBucketValue.percentile(99.9); + } + + Arrays.sort(values); + + PercentilesBucket percentilesBucketValue = response.getAggregations().get("percentile_terms_bucket"); + assertThat(percentilesBucketValue, notNullValue()); + assertThat(percentilesBucketValue.getName(), equalTo("percentile_terms_bucket")); + for (Double p : percent) { + double expected = values[(int)((p / 100) * values.length)]; + assertThat(percentilesBucketValue.percentile(p), equalTo(expected)); + } + } +} diff --git a/docs/reference/aggregations/pipeline.asciidoc b/docs/reference/aggregations/pipeline.asciidoc index 670ed6266b0..b6a1073156a 100644 --- a/docs/reference/aggregations/pipeline.asciidoc +++ b/docs/reference/aggregations/pipeline.asciidoc @@ -163,6 +163,7 @@ include::pipeline/derivative-aggregation.asciidoc[] include::pipeline/max-bucket-aggregation.asciidoc[] include::pipeline/min-bucket-aggregation.asciidoc[] include::pipeline/sum-bucket-aggregation.asciidoc[] +include::pipeline/percentiles-bucket-aggregation.asciidoc[] include::pipeline/movavg-aggregation.asciidoc[] include::pipeline/cumulative-sum-aggregation.asciidoc[] include::pipeline/bucket-script-aggregation.asciidoc[] diff --git a/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc new file mode 100644 index 00000000000..3e3c6d6fcff
--- /dev/null +++ b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc @@ -0,0 +1,121 @@ +[[search-aggregations-pipeline-percentiles-bucket-aggregation]] +=== Percentiles Bucket Aggregation + +coming[2.1.0] + +experimental[] + +A sibling pipeline aggregation which calculates percentiles across all buckets of a specified metric in a sibling aggregation. +The specified metric must be numeric and the sibling aggregation must be a multi-bucket aggregation. + +==== Syntax + +A `percentiles_bucket` aggregation looks like this in isolation: + +[source,js] +-------------------------------------------------- +{ + "percentiles_bucket": { + "buckets_path": "the_sum" + } +} +-------------------------------------------------- + +.`percentiles_bucket` Parameters +|=== +|Parameter Name |Description |Required |Default Value +|`buckets_path` |The path to the buckets we wish to calculate percentiles for (see <> for more + details) |Required | +|`gap_policy` |The policy to apply when gaps are found in the data (see <> for more + details)|Optional | `skip` +|`format` |format to apply to the output value of this aggregation |Optional | `null` +|`percents` |The list of percentiles to calculate |Optional | `[ 1, 5, 25, 50, 75, 95, 99 ]` +|=== + +The following snippet calculates percentiles for the total monthly `sales` buckets: + +[source,js] +-------------------------------------------------- +{ + "aggs" : { + "sales_per_month" : { + "date_histogram" : { + "field" : "date", + "interval" : "month" + }, + "aggs": { + "sales": { + "sum": { + "field": "price" + } + } + } + }, + "percentiles_monthly_sales": { + "percentiles_bucket": { + "buckets_path": "sales_per_month>sales", <1> + "percents": [ 25.0, 50.0, 75.0 ] <2> + } + } + } +} +-------------------------------------------------- +<1> `buckets_path` instructs this percentiles_bucket aggregation that we want to calculate percentiles for +the `sales` aggregation in the `sales_per_month` date histogram. +<2> `percents` specifies which percentiles we wish to calculate, in this case, the 25th, 50th and 75th percentiles. + +And the following may be the response: + +[source,js] +-------------------------------------------------- +{ + "aggregations": { + "sales_per_month": { + "buckets": [ + { + "key_as_string": "2015/01/01 00:00:00", + "key": 1420070400000, + "doc_count": 3, + "sales": { + "value": 550 + } + }, + { + "key_as_string": "2015/02/01 00:00:00", + "key": 1422748800000, + "doc_count": 2, + "sales": { + "value": 60 + } + }, + { + "key_as_string": "2015/03/01 00:00:00", + "key": 1425168000000, + "doc_count": 2, + "sales": { + "value": 375 + } + } + ] + }, + "percentiles_monthly_sales": { + "values" : { + "25.0": 60, + "50.0": 375, + "75.0": 550 + } + } + } +} +-------------------------------------------------- + + +==== Percentiles_bucket implementation + +The percentiles bucket aggregation returns the nearest input data point that is not greater than the requested percentile; it does not +interpolate between data points. + +The percentiles are calculated exactly, not approximated (unlike the Percentiles Metric aggregation). This means +the implementation maintains an in-memory, sorted list of your data to compute the percentiles, before discarding the +data. You may run into memory pressure issues if you attempt to calculate percentiles over many millions of +data-points in a single `percentiles_bucket`.
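
To make the nearest-rank rule above concrete: the tests for this aggregation compute each expected value as `values[(int) ((p / 100) * values.length)]` over the sorted bucket values. The following is a minimal, standalone Java sketch of that computation, not the shipped reducer; the clamp for `percent == 100` is an added guard the tests never exercise:

[source,java]
--------------------------------------------------
import java.util.Arrays;

// Nearest-rank percentile over completed bucket values, mirroring the
// expected-value computation used in the PercentilesBucket tests above.
final class NearestRankPercentile {
    static double percentile(double[] bucketValues, double percent) {
        if (bucketValues.length == 0) {
            return Double.NaN; // no buckets: the aggregation reports NaN
        }
        double[] sorted = bucketValues.clone();
        Arrays.sort(sorted);
        int index = (int) ((percent / 100.0) * sorted.length);
        // percent == 100 would index one past the end; clamp to the last value.
        return sorted[Math.min(index, sorted.length - 1)];
    }
}
--------------------------------------------------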
\ No newline at end of file From c5b39ce85ed43e02ce99136c9ad73f51a202cb76 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Thu, 3 Sep 2015 23:17:01 -0400 Subject: [PATCH 08/17] [DOCS] Fix broken inter-page link --- .../pipeline/percentiles-bucket-aggregation.asciidoc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc index 3e3c6d6fcff..247696943dd 100644 --- a/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc +++ b/docs/reference/aggregations/pipeline/percentiles-bucket-aggregation.asciidoc @@ -24,7 +24,7 @@ A `percentiles_bucket` aggregation looks like this in isolation: .`percentiles_bucket` Parameters |=== |Parameter Name |Description |Required |Default Value -|`buckets_path` |The path to the buckets we wish to calculate percentiles for (see <> for more +|`buckets_path` |The path to the buckets we wish to calculate percentiles for (see <> for more details) |Required | |`gap_policy` |The policy to apply when gaps are found in the data (see <> for more details)|Optional | `skip` From 4f5591be8d704868876856a53615ee71c588b07c Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 3 Sep 2015 13:51:29 +0200 Subject: [PATCH 09/17] Fix deprecations introduced by the upgrade to Lucene 5.3 This changes construction of Phrase and Boolean queries to use their builders, and replaces BitDocIdSetFilter with BitSetProducer for nested and parent/child queries. I had to remove the ParentIdsFilter for the case where there is a single parent, as it was using the source of BitSets for parents as a regular Filter, which is no longer possible. I don't think this is an issue since this case rarely occurs, and the alternative logic for when there are several matching parent ids should not be much worse.
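
Two migration patterns recur throughout the hunks below. First, Lucene 5.3 deprecated the mutable `BooleanQuery` and `PhraseQuery` classes in favour of immutable queries assembled through builders. A minimal before/after sketch of the `BooleanQuery` change, with illustrative field and term names:

[source,java]
--------------------------------------------------
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

final class BuilderMigration {
    // Deprecated since Lucene 5.3: construct, then mutate in place.
    static Query before() {
        BooleanQuery bq = new BooleanQuery();
        bq.add(new TermQuery(new Term("field", "a")), Occur.MUST);
        bq.add(new TermQuery(new Term("field", "b")), Occur.SHOULD);
        return bq;
    }

    // The pattern this patch applies everywhere: accumulate clauses on a
    // builder, then build an immutable BooleanQuery.
    static Query after() {
        return new BooleanQuery.Builder()
                .add(new TermQuery(new Term("field", "a")), Occur.MUST)
                .add(new TermQuery(new Term("field", "b")), Occur.SHOULD)
                .build();
    }
}
--------------------------------------------------

Second, `BitSetProducer` replaces `BitDocIdSetFilter`: instead of asking a filter for a doc-id set, callers ask a producer for a per-segment `BitSet`. The sketch below condenses the core of the `BitsetFilterCache` change (see the hunk further down), with the caching and shard bookkeeping stripped out:

[source,java]
--------------------------------------------------
import java.io.IOException;

import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BitSet;

// Turn an arbitrary Query into a per-segment BitSet, as BitsetFilterCache
// now does inside getAndLoadIfNotPresent.
final class BitSetFromQuery {
    static BitSet compute(Query query, LeafReaderContext context) throws IOException {
        // Create the weight against the top-level context, then score one segment.
        IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
        IndexSearcher searcher = new IndexSearcher(topLevelContext);
        searcher.setQueryCache(null); // the surrounding cache owns caching
        Weight weight = searcher.createNormalizedWeight(query, false);
        DocIdSetIterator it = weight.scorer(context);
        // A null scorer means no document matches in this segment.
        return it == null ? null : BitSet.of(it, context.reader().maxDoc());
    }
}
--------------------------------------------------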
--- .../lucene/queries/BlendedTermQuery.java | 33 ++++-- .../vectorhighlight/CustomFieldQuery.java | 9 +- .../lucene/search/MoreLikeThisQuery.java | 7 +- .../common/lucene/search/Queries.java | 21 ++-- .../index/aliases/IndexAliasesService.java | 4 +- .../index/cache/bitset/BitsetFilterCache.java | 100 ++++++++--------- .../elasticsearch/index/engine/Engine.java | 27 ++++- .../index/engine/InternalEngine.java | 8 +- .../index/fielddata/IndexFieldData.java | 18 +-- .../BytesRefFieldComparatorSource.java | 4 +- .../DoubleValuesComparatorSource.java | 2 +- .../FloatValuesComparatorSource.java | 2 +- .../LongValuesComparatorSource.java | 2 +- .../index/mapper/MapperService.java | 16 +-- .../index/mapper/internal/IdFieldMapper.java | 8 +- .../index/query/AndQueryParser.java | 5 +- .../index/query/BoolQueryParser.java | 6 +- .../index/query/ExistsQueryParser.java | 5 +- .../index/query/GeoShapeQueryParser.java | 4 +- .../index/query/HasChildQueryParser.java | 4 +- .../index/query/HasParentQueryParser.java | 4 +- .../index/query/MissingQueryParser.java | 13 +-- .../index/query/MoreLikeThisQueryParser.java | 6 +- .../index/query/OrQueryParser.java | 5 +- .../index/query/QueryParseContext.java | 17 ++- .../index/query/SimpleQueryParser.java | 30 ++--- .../index/query/TermsQueryParser.java | 5 +- .../FunctionScoreQueryParser.java | 4 +- .../support/NestedInnerQueryParseSupport.java | 4 +- .../index/search/MultiMatchQuery.java | 4 +- .../child/ChildrenConstantScoreQuery.java | 13 +-- .../index/search/child/ChildrenQuery.java | 11 +- .../child/ParentConstantScoreQuery.java | 6 +- .../index/search/child/ParentIdsFilter.java | 89 +++++---------- .../index/search/child/ParentQuery.java | 4 +- .../geo/IndexedGeoBoundingBoxQuery.java | 8 +- .../search/nested/IncludeNestedDocsQuery.java | 28 ++--- .../shard/TranslogRecoveryPerformer.java | 11 +- .../percolator/PercolatorService.java | 4 +- .../search/aggregations/AggregationPhase.java | 7 +- .../bucket/nested/NestedAggregator.java | 40 +++---- .../nested/ReverseNestedAggregator.java | 25 ++--- .../search/fetch/FetchPhase.java | 7 +- .../fetch/innerhits/InnerHitsContext.java | 24 ++-- .../search/internal/DefaultSearchContext.java | 11 +- .../search/query/QueryPhase.java | 7 +- .../search/sort/GeoDistanceSortParser.java | 6 +- .../search/sort/ScriptSortParser.java | 4 +- .../search/sort/SortParseElement.java | 5 +- .../lucene/queries/BlendedTermQueryTests.java | 15 ++- .../cache/bitset/BitSetFilterCacheTests.java | 105 ------------------ .../fielddata/AbstractFieldDataTestCase.java | 2 +- .../query/SimpleIndexQueryParserTests.java | 42 +++---- .../search/child/AbstractChildTestCase.java | 15 ++- .../ChildrenConstantScoreQueryTests.java | 31 +++++- .../search/child/ChildrenQueryTests.java | 4 +- .../child/ParentConstantScoreQueryTests.java | 4 +- .../index/search/child/ParentQueryTests.java | 4 +- .../search/nested/NestedSortingTests.java | 4 +- .../bucket/nested/NestedAggregatorTests.java | 4 +- .../innerhits/NestedChildrenFilterTests.java | 8 +- .../search/query/QueryPhaseTests.java | 7 +- 62 files changed, 433 insertions(+), 499 deletions(-) delete mode 100644 core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java diff --git a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java index 17485d024ae..7d3c47f9615 100644 --- a/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java +++ 
b/core/src/main/java/org/apache/lucene/queries/BlendedTermQuery.java @@ -27,6 +27,7 @@ import org.apache.lucene.index.Term; import org.apache.lucene.index.TermContext; import org.apache.lucene.index.TermState; import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DisjunctionMaxQuery; import org.apache.lucene.search.Query; @@ -299,7 +300,8 @@ public abstract class BlendedTermQuery extends Query { return new BlendedTermQuery(terms, boosts) { @Override protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) { - BooleanQuery query = new BooleanQuery(disableCoord); + BooleanQuery.Builder query = new BooleanQuery.Builder(); + query.setDisableCoord(disableCoord); for (int i = 0; i < terms.length; i++) { TermQuery termQuery = new TermQuery(terms[i], ctx[i]); if (boosts != null) { @@ -307,7 +309,7 @@ public abstract class BlendedTermQuery extends Query { } query.add(termQuery, BooleanClause.Occur.SHOULD); } - return query; + return query.build(); } }; } @@ -316,9 +318,10 @@ public abstract class BlendedTermQuery extends Query { return new BlendedTermQuery(terms, boosts) { @Override protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) { - BooleanQuery query = new BooleanQuery(true); - BooleanQuery high = new BooleanQuery(disableCoord); - BooleanQuery low = new BooleanQuery(disableCoord); + BooleanQuery.Builder highBuilder = new BooleanQuery.Builder(); + highBuilder.setDisableCoord(disableCoord); + BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder(); + lowBuilder.setDisableCoord(disableCoord); for (int i = 0; i < terms.length; i++) { TermQuery termQuery = new TermQuery(terms[i], ctx[i]); if (boosts != null) { @@ -327,22 +330,28 @@ public abstract class BlendedTermQuery extends Query { if ((maxTermFrequency >= 1f && docFreqs[i] > maxTermFrequency) || (docFreqs[i] > (int) Math.ceil(maxTermFrequency * (float) maxDoc))) { - high.add(termQuery, BooleanClause.Occur.SHOULD); + highBuilder.add(termQuery, BooleanClause.Occur.SHOULD); } else { - low.add(termQuery, BooleanClause.Occur.SHOULD); + lowBuilder.add(termQuery, BooleanClause.Occur.SHOULD); } } + BooleanQuery high = highBuilder.build(); + BooleanQuery low = lowBuilder.build(); if (low.clauses().isEmpty()) { + BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder(); + queryBuilder.setDisableCoord(disableCoord); for (BooleanClause booleanClause : high) { - booleanClause.setOccur(BooleanClause.Occur.MUST); + queryBuilder.add(booleanClause.getQuery(), Occur.MUST); } - return high; + return queryBuilder.build(); } else if (high.clauses().isEmpty()) { return low; } else { - query.add(high, BooleanClause.Occur.SHOULD); - query.add(low, BooleanClause.Occur.MUST); - return query; + return new BooleanQuery.Builder() + .setDisableCoord(true) + .add(high, BooleanClause.Occur.SHOULD) + .add(low, BooleanClause.Occur.MUST) + .build(); } } }; diff --git a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java index 37e1f7a6df1..e1a78ac3b8c 100644 --- a/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java +++ b/core/src/main/java/org/apache/lucene/search/vectorhighlight/CustomFieldQuery.java @@ -104,12 +104,13 @@ public class CustomFieldQuery extends FieldQuery { * It seems expensive but most queries will pretty small. 
*/ if (currentPos == terms.size()) { - PhraseQuery query = new PhraseQuery(); - query.setBoost(orig.getBoost()); - query.setSlop(orig.getSlop()); + PhraseQuery.Builder queryBuilder = new PhraseQuery.Builder(); + queryBuilder.setSlop(orig.getSlop()); for (int i = 0; i < termsIdx.length; i++) { - query.add(terms.get(i)[termsIdx[i]], pos[i]); + queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]); } + PhraseQuery query = queryBuilder.build(); + query.setBoost(orig.getBoost()); this.flatten(query, reader, flatQueries); } else { Term[] t = terms.get(currentPos); diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java index 6bbd97bfccb..2084b675aea 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/MoreLikeThisQuery.java @@ -163,11 +163,11 @@ public class MoreLikeThisQuery extends Query { } private Query createQuery(XMoreLikeThis mlt) throws IOException { - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder(); if (this.likeFields != null) { Query mltQuery = mlt.like(this.likeFields); mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch); - bq.add(mltQuery, BooleanClause.Occur.SHOULD); + bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD); } if (this.likeText != null) { Reader[] readers = new Reader[likeText.length]; @@ -177,9 +177,10 @@ public class MoreLikeThisQuery extends Query { //LUCENE 4 UPGRADE this mapps the 3.6 behavior (only use the first field) Query mltQuery = mlt.like(moreLikeFields[0], readers); mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch); - bq.add(mltQuery, BooleanClause.Occur.SHOULD); + bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD); } + BooleanQuery bq = bqBuilder.build(); bq.setBoost(getBoost()); return bq; } diff --git a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java index d3b1f8f0eba..0b7682cd7fd 100644 --- a/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java +++ b/core/src/main/java/org/elasticsearch/common/lucene/search/Queries.java @@ -40,7 +40,7 @@ public class Queries { /** Return a query that matches no document. */ public static Query newMatchNoDocsQuery() { - return new BooleanQuery(); + return new BooleanQuery.Builder().build(); } public static Filter newNestedFilter() { @@ -64,10 +64,10 @@ public class Queries { /** Return a query that matches all documents but those that match the given query. 
*/ public static Query not(Query q) { - BooleanQuery bq = new BooleanQuery(); - bq.add(new MatchAllDocsQuery(), Occur.MUST); - bq.add(q, Occur.MUST_NOT); - return bq; + return new BooleanQuery.Builder() + .add(new MatchAllDocsQuery(), Occur.MUST) + .add(q, Occur.MUST_NOT) + .build(); } public static boolean isNegativeQuery(Query q) { @@ -86,9 +86,14 @@ public class Queries { public static Query fixNegativeQueryIfNeeded(Query q) { if (isNegativeQuery(q)) { - BooleanQuery newBq = (BooleanQuery) q.clone(); - newBq.add(newMatchAllQuery(), BooleanClause.Occur.MUST); - return newBq; + BooleanQuery bq = (BooleanQuery) q; + BooleanQuery.Builder builder = new BooleanQuery.Builder(); + builder.setDisableCoord(bq.isCoordDisabled()); + for (BooleanClause clause : bq) { + builder.add(clause); + } + builder.add(newMatchAllQuery(), BooleanClause.Occur.MUST); + return builder.build(); } return q; } diff --git a/core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java b/core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java index 8129da4df70..fc93153b502 100644 --- a/core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java +++ b/core/src/main/java/org/elasticsearch/index/aliases/IndexAliasesService.java @@ -73,7 +73,7 @@ public class IndexAliasesService extends AbstractIndexComponent { return parse(alias); } else { // we need to bench here a bit, to see maybe it makes sense to use OrFilter - BooleanQuery combined = new BooleanQuery(); + BooleanQuery.Builder combined = new BooleanQuery.Builder(); for (String aliasName : aliasNames) { AliasMetaData alias = this.aliases.get(aliasName); if (alias == null) { @@ -88,7 +88,7 @@ public class IndexAliasesService extends AbstractIndexComponent { return null; } } - return combined; + return combined.build(); } } diff --git a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java index 2203c94bd5a..20d79fc2a70 100644 --- a/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java +++ b/core/src/main/java/org/elasticsearch/index/cache/bitset/BitsetFilterCache.java @@ -24,14 +24,17 @@ import com.google.common.cache.CacheBuilder; import com.google.common.cache.RemovalListener; import com.google.common.cache.RemovalNotification; +import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Weight; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitDocIdSet; -import org.apache.lucene.util.SparseFixedBitSet; +import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.inject.Inject; @@ -56,6 +59,7 @@ import java.io.Closeable; import java.io.IOException; import java.util.HashSet; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; @@ -69,13 +73,13 @@ import java.util.concurrent.Executor; * and require that it should 
always be around should use this cache, otherwise the * {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead. */ -public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { +public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener>, Closeable { public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly"; private final boolean loadRandomAccessFiltersEagerly; - private final Cache> loadedFilters; - private final BitDocIdSetFilterWarmer warmer; + private final Cache> loadedFilters; + private final BitSetProducerWarmer warmer; private IndexService indexService; private IndicesWarmer indicesWarmer; @@ -85,7 +89,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea super(index, indexSettings); this.loadRandomAccessFiltersEagerly = indexSettings.getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true); this.loadedFilters = CacheBuilder.newBuilder().removalListener(this).build(); - this.warmer = new BitDocIdSetFilterWarmer(); + this.warmer = new BitSetProducerWarmer(); } @Inject(optional = true) @@ -101,9 +105,8 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea indicesWarmer.addListener(warmer); } - public BitDocIdSetFilter getBitDocIdSetFilter(Filter filter) { - assert filter != null; - return new BitDocIdSetFilterWrapper(filter); + public BitSetProducer getBitSetProducer(Query query) { + return new QueryWrapperBitSetProducer(query); } @Override @@ -122,38 +125,29 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea loadedFilters.invalidateAll(); } - private BitDocIdSet getAndLoadIfNotPresent(final Filter filter, final LeafReaderContext context) throws IOException, ExecutionException { + private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException { final Object coreCacheReader = context.reader().getCoreCacheKey(); final ShardId shardId = ShardUtils.extractShardId(context.reader()); - Cache filterToFbs = loadedFilters.get(coreCacheReader, new Callable>() { + Cache filterToFbs = loadedFilters.get(coreCacheReader, new Callable>() { @Override - public Cache call() throws Exception { + public Cache call() throws Exception { context.reader().addCoreClosedListener(BitsetFilterCache.this); return CacheBuilder.newBuilder().build(); } }); - return filterToFbs.get(filter, new Callable() { + return filterToFbs.get(query, new Callable() { @Override public Value call() throws Exception { - DocIdSet docIdSet = filter.getDocIdSet(context, null); - final BitDocIdSet bitSet; - if (docIdSet instanceof BitDocIdSet) { - bitSet = (BitDocIdSet) docIdSet; + final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context); + final IndexSearcher searcher = new IndexSearcher(topLevelContext); + searcher.setQueryCache(null); + final Weight weight = searcher.createNormalizedWeight(query, false); + final DocIdSetIterator it = weight.scorer(context); + final BitSet bitSet; + if (it == null) { + bitSet = null; } else { - BitDocIdSet.Builder builder = new BitDocIdSet.Builder(context.reader().maxDoc()); - if (docIdSet != null && docIdSet != DocIdSet.EMPTY) { - DocIdSetIterator iterator = docIdSet.iterator(); - // some filters (QueryWrapperFilter) return not null or DocIdSet.EMPTY if there no matching docs - if (iterator != null) { - 
builder.or(iterator); - } - } - BitDocIdSet bits = builder.build(); - // code expects this to be non-null - if (bits == null) { - bits = new BitDocIdSet(new SparseFixedBitSet(context.reader().maxDoc()), 0); - } - bitSet = bits; + bitSet = BitSet.of(it, context.reader().maxDoc()); } Value value = new Value(bitSet, shardId); @@ -169,18 +163,18 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea } @Override - public void onRemoval(RemovalNotification> notification) { + public void onRemoval(RemovalNotification> notification) { Object key = notification.getKey(); if (key == null) { return; } - Cache value = notification.getValue(); + Cache value = notification.getValue(); if (value == null) { return; } - for (Map.Entry entry : value.asMap().entrySet()) { + for (Map.Entry entry : value.asMap().entrySet()) { if (entry.getValue().shardId == null) { continue; } @@ -195,50 +189,50 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea public static final class Value { - final BitDocIdSet bitset; + final BitSet bitset; final ShardId shardId; - public Value(BitDocIdSet bitset, ShardId shardId) { + public Value(BitSet bitset, ShardId shardId) { this.bitset = bitset; this.shardId = shardId; } } - final class BitDocIdSetFilterWrapper extends BitDocIdSetFilter { + final class QueryWrapperBitSetProducer implements BitSetProducer { - final Filter filter; + final Query query; - BitDocIdSetFilterWrapper(Filter filter) { - this.filter = filter; + QueryWrapperBitSetProducer(Query query) { + this.query = Objects.requireNonNull(query); } @Override - public BitDocIdSet getDocIdSet(LeafReaderContext context) throws IOException { + public BitSet getBitSet(LeafReaderContext context) throws IOException { try { - return getAndLoadIfNotPresent(filter, context); + return getAndLoadIfNotPresent(query, context); } catch (ExecutionException e) { throw ExceptionsHelper.convertToElastic(e); } } @Override - public String toString(String field) { - return "random_access(" + filter + ")"; + public String toString() { + return "random_access(" + query + ")"; } @Override public boolean equals(Object o) { - if (!(o instanceof BitDocIdSetFilterWrapper)) return false; - return this.filter.equals(((BitDocIdSetFilterWrapper) o).filter); + if (!(o instanceof QueryWrapperBitSetProducer)) return false; + return this.query.equals(((QueryWrapperBitSetProducer) o).query); } @Override public int hashCode() { - return filter.hashCode() ^ 0x1117BF26; + return 31 * getClass().hashCode() + query.hashCode(); } } - final class BitDocIdSetFilterWarmer extends IndicesWarmer.Listener { + final class BitSetProducerWarmer extends IndicesWarmer.Listener { @Override public IndicesWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, IndicesWarmer.WarmerContext context, ThreadPool threadPool) { @@ -247,7 +241,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea } boolean hasNested = false; - final Set warmUp = new HashSet<>(); + final Set warmUp = new HashSet<>(); final MapperService mapperService = indexShard.mapperService(); for (DocumentMapper docMapper : mapperService.docMappers(false)) { if (docMapper.hasNestedObjects()) { @@ -270,7 +264,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea final Executor executor = threadPool.executor(executor()); final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size()); for (final 
LeafReaderContext ctx : context.searcher().reader().leaves()) { - for (final Filter filterToWarm : warmUp) { + for (final Query filterToWarm : warmUp) { executor.execute(new Runnable() { @Override @@ -306,7 +300,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea } - Cache> getLoadedFilters() { + Cache> getLoadedFilters() { return loadedFilters; } } diff --git a/core/src/main/java/org/elasticsearch/index/engine/Engine.java b/core/src/main/java/org/elasticsearch/index/engine/Engine.java index 14181cc4c31..071a00ba65c 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -20,11 +20,22 @@ package org.elasticsearch.index.engine; import com.google.common.base.Preconditions; -import org.apache.lucene.index.*; + +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.FilterLeafReader; +import org.apache.lucene.index.IndexCommit; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SegmentCommitInfo; +import org.apache.lucene.index.SegmentInfos; +import org.apache.lucene.index.SegmentReader; +import org.apache.lucene.index.Term; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.SearcherManager; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountables; import org.elasticsearch.ExceptionsHelper; @@ -55,7 +66,11 @@ import org.elasticsearch.index.translog.Translog; import java.io.Closeable; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.List; +import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.locks.Condition; @@ -911,13 +926,13 @@ public abstract class Engine implements Closeable { private final String[] filteringAliases; private final Query aliasFilter; private final String[] types; - private final BitDocIdSetFilter parentFilter; + private final BitSetProducer parentFilter; private final Operation.Origin origin; private final long startTime; private long endTime; - public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Query aliasFilter, BitDocIdSetFilter parentFilter, Operation.Origin origin, long startTime, String... types) { + public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Query aliasFilter, BitSetProducer parentFilter, Operation.Origin origin, long startTime, String... 
types) { this.query = query; this.source = source; this.types = types; @@ -952,7 +967,7 @@ public abstract class Engine implements Closeable { return parentFilter != null; } - public BitDocIdSetFilter parentFilter() { + public BitSetProducer parentFilter() { return parentFilter; } diff --git a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index b32a5e06321..054b0b1dfff 100644 --- a/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/core/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -638,10 +638,10 @@ public class InternalEngine extends Engine { try { Query query = delete.query(); if (delete.aliasFilter() != null) { - BooleanQuery boolQuery = new BooleanQuery(); - boolQuery.add(query, Occur.MUST); - boolQuery.add(delete.aliasFilter(), Occur.FILTER); - query = boolQuery; + query = new BooleanQuery.Builder() + .add(query, Occur.MUST) + .add(delete.aliasFilter(), Occur.FILTER) + .build(); } if (delete.nested()) { query = new IncludeNestedDocsQuery(query, delete.parentFilter()); diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java index f2d77b5af44..c4faa7fa7ee 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/IndexFieldData.java @@ -19,11 +19,15 @@ package org.elasticsearch.index.fielddata; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; -import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.search.FieldComparatorSource; +import org.apache.lucene.search.Filter; +import org.apache.lucene.search.SortField; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitDocIdSet; +import org.apache.lucene.util.BitSet; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.Settings; @@ -119,10 +123,10 @@ public interface IndexFieldData extends IndexCompone */ public static class Nested { - private final BitDocIdSetFilter rootFilter; + private final BitSetProducer rootFilter; private final Filter innerFilter; - public Nested(BitDocIdSetFilter rootFilter, Filter innerFilter) { + public Nested(BitSetProducer rootFilter, Filter innerFilter) { this.rootFilter = rootFilter; this.innerFilter = innerFilter; } @@ -130,8 +134,8 @@ public interface IndexFieldData extends IndexCompone /** * Get a {@link BitDocIdSet} that matches the root documents. 
*/ - public BitDocIdSet rootDocs(LeafReaderContext ctx) throws IOException { - return rootFilter.getDocIdSet(ctx); + public BitSet rootDocs(LeafReaderContext ctx) throws IOException { + return rootFilter.getBitSet(ctx); } /** diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java index 8b15049e188..3d5959a054a 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/BytesRefFieldComparatorSource.java @@ -94,7 +94,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat if (nested == null) { selectedValues = sortMode.select(values); } else { - final BitSet rootDocs = nested.rootDocs(context).bits(); + final BitSet rootDocs = nested.rootDocs(context); final DocIdSet innerDocs = nested.innerDocs(context); selectedValues = sortMode.select(values, rootDocs, innerDocs); } @@ -124,7 +124,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat if (nested == null) { selectedValues = sortMode.select(values, nonNullMissingBytes); } else { - final BitSet rootDocs = nested.rootDocs(context).bits(); + final BitSet rootDocs = nested.rootDocs(context); final DocIdSet innerDocs = nested.innerDocs(context); selectedValues = sortMode.select(values, nonNullMissingBytes, rootDocs, innerDocs, context.reader().maxDoc()); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java index 7d991be9200..4ea2eaed7de 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/DoubleValuesComparatorSource.java @@ -78,7 +78,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato if (nested == null) { selectedValues = sortMode.select(values, dMissingValue); } else { - final BitSet rootDocs = nested.rootDocs(context).bits(); + final BitSet rootDocs = nested.rootDocs(context); final DocIdSet innerDocs = nested.innerDocs(context); selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); } diff --git a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java index 45862ae9f86..40c0e83ddca 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/FloatValuesComparatorSource.java @@ -70,7 +70,7 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator if (nested == null) { selectedValues = sortMode.select(values, dMissingValue); } else { - final BitSet rootDocs = nested.rootDocs(context).bits(); + final BitSet rootDocs = nested.rootDocs(context); final DocIdSet innerDocs = nested.innerDocs(context); selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); } diff --git 
a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java index 44bc23a7b08..2e596c463c7 100644 --- a/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java +++ b/core/src/main/java/org/elasticsearch/index/fielddata/fieldcomparator/LongValuesComparatorSource.java @@ -69,7 +69,7 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS if (nested == null) { selectedValues = sortMode.select(values, dMissingValue); } else { - final BitSet rootDocs = nested.rootDocs(context).bits(); + final BitSet rootDocs = nested.rootDocs(context); final DocIdSet innerDocs = nested.innerDocs(context); selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc()); } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 857b2078033..b38b7ad2d34 100755 --- a/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -426,10 +426,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable { if (types == null || types.length == 0) { if (hasNested && filterPercolateType) { - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(percolatorType, Occur.MUST_NOT); bq.add(Queries.newNonNestedFilter(), Occur.MUST); - return new ConstantScoreQuery(bq); + return new ConstantScoreQuery(bq.build()); } else if (hasNested) { return Queries.newNonNestedFilter(); } else if (filterPercolateType) { @@ -444,10 +444,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable { DocumentMapper docMapper = documentMapper(types[0]); Query filter = docMapper != null ? docMapper.typeFilter() : new TermQuery(new Term(TypeFieldMapper.NAME, types[0])); if (filterPercolateType) { - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(percolatorType, Occur.MUST_NOT); bq.add(filter, Occur.MUST); - return new ConstantScoreQuery(bq); + return new ConstantScoreQuery(bq.build()); } else { return filter; } @@ -474,16 +474,16 @@ public class MapperService extends AbstractIndexComponent implements Closeable { } TermsQuery termsFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes); if (filterPercolateType) { - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(percolatorType, Occur.MUST_NOT); bq.add(termsFilter, Occur.MUST); - return new ConstantScoreQuery(bq); + return new ConstantScoreQuery(bq.build()); } else { return termsFilter; } } else { // Current bool filter requires that at least one should clause matches, even with a must clause. 
- BooleanQuery bool = new BooleanQuery(); + BooleanQuery.Builder bool = new BooleanQuery.Builder(); for (String type : types) { DocumentMapper docMapper = documentMapper(type); if (docMapper == null) { @@ -499,7 +499,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable { bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST); } - return new ConstantScoreQuery(bool); + return new ConstantScoreQuery(bool.build()); } } diff --git a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java index 63fa41faea1..96810ec3b88 100644 --- a/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java +++ b/core/src/main/java/org/elasticsearch/index/mapper/internal/IdFieldMapper.java @@ -189,7 +189,7 @@ public class IdFieldMapper extends MetadataFieldMapper { return super.prefixQuery(value, method, context); } Collection queryTypes = context.queryTypes(); - BooleanQuery query = new BooleanQuery(); + BooleanQuery.Builder query = new BooleanQuery.Builder(); for (String queryType : queryTypes) { PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value)))); if (method != null) { @@ -197,7 +197,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } query.add(prefixQuery, BooleanClause.Occur.SHOULD); } - return query; + return query.build(); } @Override @@ -214,7 +214,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } return regexpQuery; } - BooleanQuery query = new BooleanQuery(); + BooleanQuery.Builder query = new BooleanQuery.Builder(); for (String queryType : queryTypes) { RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates); if (method != null) { @@ -222,7 +222,7 @@ public class IdFieldMapper extends MetadataFieldMapper { } query.add(regexpQuery, BooleanClause.Occur.SHOULD); } - return query; + return query.build(); } } diff --git a/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java index bb0e1cbbcd5..9141b163e19 100644 --- a/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java @@ -106,10 +106,11 @@ public class AndQueryParser implements QueryParser { return null; } - BooleanQuery query = new BooleanQuery(); + BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder(); for (Query f : queries) { - query.add(f, Occur.MUST); + queryBuilder.add(f, Occur.MUST); } + BooleanQuery query = queryBuilder.build(); if (queryName != null) { parseContext.addNamedQuery(queryName, query); } diff --git a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java index 6476ea814f3..542a0850aa3 100644 --- a/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java @@ -161,10 +161,12 @@ public class BoolQueryParser implements QueryParser { return new MatchAllDocsQuery(); } - BooleanQuery booleanQuery = new BooleanQuery(disableCoord); + BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder(); + booleanQueryBuilder.setDisableCoord(disableCoord); for (BooleanClause clause : clauses) { - 
booleanQuery.add(clause); + booleanQueryBuilder.add(clause); } + BooleanQuery booleanQuery = booleanQueryBuilder.build(); booleanQuery.setBoost(boost); booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch); Query query = adjustPureNegative ? fixNegativeQueryIfNeeded(booleanQuery) : booleanQuery; diff --git a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java index 0ce578caad5..951966720d7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java @@ -95,7 +95,7 @@ public class ExistsQueryParser implements QueryParser { return Queries.newMatchNoDocsQuery(); } - BooleanQuery boolFilter = new BooleanQuery(); + BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder(); for (String field : fields) { MappedFieldType fieldType = parseContext.fieldMapper(field); Query filter = null; @@ -115,9 +115,10 @@ public class ExistsQueryParser implements QueryParser { if (filter == null) { filter = new TermRangeQuery(field, null, null, true, true); } - boolFilter.add(filter, BooleanClause.Occur.SHOULD); + boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD); } + BooleanQuery boolFilter = boolFilterBuilder.build(); if (queryName != null) { parseContext.addNamedQuery(queryName, boolFilter); } diff --git a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java index 286fa1cb74c..e959c424aa5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java @@ -159,12 +159,12 @@ public class GeoShapeQueryParser implements QueryParser { if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) { // this strategy doesn't support disjoint anymore: but it did before, including creating lucene fieldcache (!) 
// in this case, execute disjoint as exists && !intersects - BooleanQuery bool = new BooleanQuery(); + BooleanQuery.Builder bool = new BooleanQuery.Builder(); Query exists = ExistsQueryParser.newFilter(parseContext, fieldName, null); Filter intersects = strategy.makeFilter(getArgs(shape, ShapeRelation.INTERSECTS)); bool.add(exists, BooleanClause.Occur.MUST); bool.add(intersects, BooleanClause.Occur.MUST_NOT); - query = new ConstantScoreQuery(bool); + query = new ConstantScoreQuery(bool.build()); } else { query = strategy.makeQuery(getArgs(shape, shapeRelation)); } diff --git a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java index 87a76689a67..4e9b1f3d346 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasChildQueryParser.java @@ -25,7 +25,7 @@ import org.apache.lucene.search.Filter; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.QueryWrapperFilter; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.elasticsearch.Version; @@ -167,7 +167,7 @@ public class HasChildQueryParser implements QueryParser { throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'"); } - BitDocIdSetFilter nonNestedDocsFilter = null; + BitSetProducer nonNestedDocsFilter = null; if (parentDocMapper.hasNestedObjects()) { nonNestedDocsFilter = parseContext.bitsetFilter(Queries.newNonNestedFilter()); } diff --git a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java index 67422d0aa2f..954e57757b2 100644 --- a/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/HasParentQueryParser.java @@ -180,14 +180,14 @@ public class HasParentQueryParser implements QueryParser { parentFilter = documentMapper.typeFilter(); } } else { - BooleanQuery parentsFilter = new BooleanQuery(); + BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder(); for (String parentTypeStr : parentTypes) { DocumentMapper documentMapper = parseContext.mapperService().documentMapper(parentTypeStr); if (documentMapper != null) { parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD); } } - parentFilter = parentsFilter; + parentFilter = parentsFilter.build(); } if (parentFilter == null) { diff --git a/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java index 6ef19d7e4f6..8d13caa0e6c 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java @@ -118,7 +118,7 @@ public class MissingQueryParser implements QueryParser { Query nullFilter = null; if (existence) { - BooleanQuery boolFilter = new BooleanQuery(); + BooleanQuery.Builder boolFilter = new BooleanQuery.Builder(); for (String field : fields) { MappedFieldType fieldType = parseContext.fieldMapper(field); Query filter = null; @@ -141,7 +141,7 @@ public class MissingQueryParser implements QueryParser { boolFilter.add(filter, 
BooleanClause.Occur.SHOULD); } - existenceFilter = boolFilter; + existenceFilter = boolFilter.build(); existenceFilter = Queries.not(existenceFilter); } @@ -157,11 +157,10 @@ public class MissingQueryParser implements QueryParser { Query filter; if (nullFilter != null) { if (existenceFilter != null) { - BooleanQuery combined = new BooleanQuery(); - combined.add(existenceFilter, BooleanClause.Occur.SHOULD); - combined.add(nullFilter, BooleanClause.Occur.SHOULD); - // cache the not filter as well, so it will be faster - filter = combined; + filter = new BooleanQuery.Builder() + .add(existenceFilter, BooleanClause.Occur.SHOULD) + .add(nullFilter, BooleanClause.Occur.SHOULD) + .build(); } else { filter = nullFilter; } diff --git a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java index 98c3e2b5bf9..4397091fe65 100644 --- a/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryParser.java @@ -290,14 +290,14 @@ public class MoreLikeThisQueryParser implements QueryParser { } } - BooleanQuery boolQuery = new BooleanQuery(); + BooleanQuery.Builder boolQuery = new BooleanQuery.Builder(); boolQuery.add(mltQuery, BooleanClause.Occur.SHOULD); // exclude the items from the search if (!include) { handleExclude(boolQuery, likeItems); } - return boolQuery; + return boolQuery.build(); } return mltQuery; @@ -342,7 +342,7 @@ public class MoreLikeThisQueryParser implements QueryParser { return moreLikeFields; } - private void handleExclude(BooleanQuery boolQuery, MultiTermVectorsRequest likeItems) { + private void handleExclude(BooleanQuery.Builder boolQuery, MultiTermVectorsRequest likeItems) { // artificial docs get assigned a random id and should be disregarded List uids = new ArrayList<>(); for (TermVectorsRequest item : likeItems) { diff --git a/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java index ff2c0b2c432..fca3f93d497 100644 --- a/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java @@ -103,10 +103,11 @@ public class OrQueryParser implements QueryParser { return null; } - BooleanQuery query = new BooleanQuery(); + BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder(); for (Query f : queries) { - query.add(f, Occur.SHOULD); + queryBuilder.add(f, Occur.SHOULD); } + BooleanQuery query = queryBuilder.build(); if (queryName != null) { parseContext.addNamedQuery(queryName, query); } diff --git a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java index 4b122008749..c8d6da065cc 100644 --- a/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java +++ b/core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java @@ -27,7 +27,7 @@ import org.apache.lucene.queryparser.classic.MapperQueryParser; import org.apache.lucene.queryparser.classic.QueryParserSettings; import org.apache.lucene.search.Filter; import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.similarities.Similarity; import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -39,7 +39,11
@@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.index.analysis.AnalysisService; import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.ContentPath; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.mapper.Mapper; +import org.elasticsearch.index.mapper.MapperBuilders; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.core.StringFieldMapper; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.index.query.support.NestedScope; @@ -50,7 +54,10 @@ import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.lookup.SearchLookup; import java.io.IOException; -import java.util.*; +import java.util.Arrays; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; public class QueryParseContext { @@ -173,8 +180,8 @@ public class QueryParseContext { return queryParser; } - public BitDocIdSetFilter bitsetFilter(Filter filter) { - return indexQueryParser.bitsetFilterCache.getBitDocIdSetFilter(filter); + public BitSetProducer bitsetFilter(Filter filter) { + return indexQueryParser.bitsetFilterCache.getBitSetProducer(filter); } public > IFD getForField(MappedFieldType mapper) { diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java index 48f3ce64e50..9ae0703282f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryParser.java @@ -56,7 +56,8 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp @Override public Query newDefaultQuery(String text) { - BooleanQuery bq = new BooleanQuery(true); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { try { Query q = createBooleanQuery(entry.getKey(), text, super.getDefaultOperator()); @@ -68,7 +69,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp rethrowUnlessLenient(e); } } - return super.simplify(bq); + return super.simplify(bq.build()); } /** @@ -80,24 +81,24 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp if (settings.lowercaseExpandedTerms()) { text = text.toLowerCase(settings.locale()); } - BooleanQuery bq = new BooleanQuery(true); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { try { Query q = new FuzzyQuery(new Term(entry.getKey(), text), fuzziness); - if (q != null) { - q.setBoost(entry.getValue()); - bq.add(q, BooleanClause.Occur.SHOULD); - } + q.setBoost(entry.getValue()); + bq.add(q, BooleanClause.Occur.SHOULD); } catch (RuntimeException e) { rethrowUnlessLenient(e); } } - return super.simplify(bq); + return super.simplify(bq.build()); } @Override public Query newPhraseQuery(String text, int slop) { - BooleanQuery bq = new BooleanQuery(true); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { try { Query q = createPhraseQuery(entry.getKey(), text, slop); @@ -109,7 +110,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp rethrowUnlessLenient(e); } } - return super.simplify(bq); + 
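The SimpleQueryParser hunks around this point show the second half of the constructor migration: new BooleanQuery(true) used its boolean argument to disable the coord factor, and the builder spells that out as setDisableCoord(true). A small sketch of the equivalence (the helper name is illustrative):

    import org.apache.lucene.search.BooleanQuery;

    class CoordSketch {
        // Old API: new BooleanQuery(true), where 'true' meant "disable the coord factor".
        static BooleanQuery.Builder noCoordBuilder() {
            BooleanQuery.Builder bq = new BooleanQuery.Builder();
            bq.setDisableCoord(true); // same semantics, now an explicit builder property
            return bq;
        }
    }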
return super.simplify(bq.build()); } /** @@ -121,7 +122,8 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp if (settings.lowercaseExpandedTerms()) { text = text.toLowerCase(settings.locale()); } - BooleanQuery bq = new BooleanQuery(true); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.setDisableCoord(true); for (Map.Entry entry : weights.entrySet()) { try { if (settings.analyzeWildcard()) { @@ -137,7 +139,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp return rethrowUnlessLenient(e); } } - return super.simplify(bq); + return super.simplify(bq.build()); } /** @@ -182,7 +184,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp } return new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef()))); } else { - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); for (int i = 0; i < numTokens; i++) { try { boolean hasNext = buffer.incrementToken(); @@ -192,7 +194,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp } bq.add(new BooleanClause(new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef()))), BooleanClause.Occur.SHOULD)); } - return bq; + return bq.build(); } } catch (IOException e) { // Bail on any exceptions, going with a regular prefix query diff --git a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java index c18ef81d8c1..b5fbce4b1bf 100644 --- a/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java @@ -193,7 +193,8 @@ public class TermsQueryParser implements QueryParser { query = new TermsQuery(fieldName, filterValues); } } else { - BooleanQuery bq = new BooleanQuery(disableCoord); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); + bq.setDisableCoord(disableCoord); for (Object term : terms) { if (fieldType != null) { bq.add(fieldType.termQuery(term, parseContext), Occur.SHOULD); @@ -201,7 +202,7 @@ public class TermsQueryParser implements QueryParser { bq.add(new TermQuery(new Term(fieldName, BytesRefs.toBytesRef(term))), Occur.SHOULD); } } - query = Queries.applyMinimumShouldMatch(bq, minShouldMatch); + query = Queries.applyMinimumShouldMatch(bq.build(), minShouldMatch); } query.setBoost(boost); diff --git a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java index 02fc425dc52..c2c649424a7 100644 --- a/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/functionscore/FunctionScoreQueryParser.java @@ -156,10 +156,10 @@ public class FunctionScoreQueryParser implements QueryParser { } else if (query == null && filter != null) { query = new ConstantScoreQuery(filter); } else if (query != null && filter != null) { - final BooleanQuery filtered = new BooleanQuery(); + final BooleanQuery.Builder filtered = new BooleanQuery.Builder(); filtered.add(query, Occur.MUST); filtered.add(filter, Occur.FILTER); - query = filtered; + query = filtered.build(); } // if all filter elements returned null, just use the query if (filterFunctions.isEmpty() && combineFunction == null) { diff --git 
a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java index 49e1a218d0c..49610a7e510 100644 --- a/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java +++ b/core/src/main/java/org/elasticsearch/index/query/support/NestedInnerQueryParseSupport.java @@ -21,7 +21,7 @@ package org.elasticsearch.index.query.support; import org.apache.lucene.search.Filter; import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentFactory; @@ -53,7 +53,7 @@ public class NestedInnerQueryParseSupport { protected boolean queryFound = false; protected boolean filterFound = false; - protected BitDocIdSetFilter parentFilter; + protected BitSetProducer parentFilter; protected Filter childFilter; protected ObjectMapper nestedObjectMapper; diff --git a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java index 34bf9445131..08cc55f3bd8 100644 --- a/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/MultiMatchQuery.java @@ -132,11 +132,11 @@ public class MultiMatchQuery extends MatchQuery { } return disMaxQuery; } else { - final BooleanQuery booleanQuery = new BooleanQuery(); + final BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder(); for (Query query : groupQuery) { booleanQuery.add(query, BooleanClause.Occur.SHOULD); } - return booleanQuery; + return booleanQuery.build(); } } diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java index 4ec1007bbb1..81c33abab21 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQuery.java @@ -35,8 +35,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.search.XFilteredDocIdSetIterator; -import org.apache.lucene.search.join.BitDocIdSetFilter; -import org.apache.lucene.util.Bits; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.LongBitSet; import org.elasticsearch.common.lucene.IndexCacheableQuery; import org.elasticsearch.common.lucene.Lucene; @@ -61,9 +60,9 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery { private final String childType; private final Filter parentFilter; private final int shortCircuitParentDocSet; - private final BitDocIdSetFilter nonNestedDocsFilter; + private final BitSetProducer nonNestedDocsFilter; - public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) { + public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) { 
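The BitDocIdSetFilter to BitSetProducer switch threaded through these files changes the consumer contract: a producer returns one BitSet per leaf from getBitSet(LeafReaderContext), and unlike the old getDocIdSet(ctx).bits() chain it may return null when a segment has no matching documents, which is why null checks appear in the updated callers. An isolated sketch of the consumer side (firstParentDoc is an illustrative helper):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.util.BitSet;

    class BitSetProducerSketch {
        static int firstParentDoc(BitSetProducer parents, LeafReaderContext leaf) throws IOException {
            BitSet bits = parents.getBitSet(leaf); // null: no parent docs in this segment
            if (bits == null) {
                return -1;
            }
            // DocIdSetIterator.NO_MORE_DOCS if the set is empty,
            // otherwise the first leaf-relative doc id
            return bits.nextSetBit(0);
        }
    }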
this.parentChildIndexFieldData = parentChildIndexFieldData; this.parentFilter = parentFilter; this.parentType = parentType; @@ -92,7 +91,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery { final long valueCount; List leaves = searcher.getIndexReader().leaves(); if (globalIfd == null || leaves.isEmpty()) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } else { AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0)); SortedDocValues globalValues = afd.getOrdinalsValues(parentType); @@ -100,7 +99,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery { } if (valueCount == 0) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType); @@ -108,7 +107,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery { final long remaining = collector.foundParents(); if (remaining == 0) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } Filter shortCircuitFilter = null; diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java index b869a4f7cb6..cf43b2293e1 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ChildrenQuery.java @@ -34,8 +34,7 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.Weight; import org.apache.lucene.search.XFilteredDocIdSetIterator; -import org.apache.lucene.search.join.BitDocIdSetFilter; -import org.apache.lucene.util.Bits; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.ToStringUtils; import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasables; @@ -74,9 +73,9 @@ public final class ChildrenQuery extends IndexCacheableQuery { protected final int minChildren; protected final int maxChildren; protected final int shortCircuitParentDocSet; - protected final BitDocIdSetFilter nonNestedDocsFilter; + protected final BitSetProducer nonNestedDocsFilter; - public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) { + public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) { this.ifd = ifd; this.parentType = parentType; this.childType = childType; @@ -150,7 +149,7 @@ public final class ChildrenQuery extends IndexCacheableQuery { IndexParentChildFieldData globalIfd = ifd.loadGlobal(searcher.getIndexReader()); if (globalIfd == null) { // No docs of the specified type exist on this shard - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } boolean abort = true; @@ -193,7 +192,7 @@ public final class ChildrenQuery extends IndexCacheableQuery { 
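The recurring replacement new BooleanQuery.Builder().build().createWeight(searcher, needsScores) in these child/parent queries leans on the fact that an empty boolean query matches no documents, making its Weight a cheap early exit when a shard holds no docs of the relevant type. The idiom in isolation (helper name illustrative):

    import java.io.IOException;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Weight;

    class MatchNothingSketch {
        static Weight matchNothing(IndexSearcher searcher, boolean needsScores) throws IOException {
            // A BooleanQuery with no clauses matches no documents,
            // so this Weight yields no hits for any segment.
            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
        }
    }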
searcher.search(childQuery, collector); numFoundParents = collector.foundParents(); if (numFoundParents == 0) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } abort = false; } finally { diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java index af764bd70e7..0f81afbb7a2 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ParentConstantScoreQuery.java @@ -81,7 +81,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery { final long maxOrd; List leaves = searcher.getIndexReader().leaves(); if (globalIfd == null || leaves.isEmpty()) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } else { AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0)); SortedDocValues globalValues = afd.getOrdinalsValues(parentType); @@ -89,14 +89,14 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery { } if (maxOrd == 0) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType); searcher.search(parentQuery, collector); if (collector.parentCount() == 0) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } return new ChildrenWeight(this, childrenFilter, collector, globalIfd); diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java b/core/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java index 7743cfe0ab4..10ead155274 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ParentIdsFilter.java @@ -21,17 +21,12 @@ package org.elasticsearch.index.search.child; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.PostingsEnum; import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.index.Term; import org.apache.lucene.index.Terms; import org.apache.lucene.index.TermsEnum; -import org.apache.lucene.search.BooleanClause.Occur; -import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Filter; -import org.apache.lucene.search.QueryWrapperFilter; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.apache.lucene.util.Bits; @@ -57,75 +52,51 @@ import java.io.IOException; */ final class ParentIdsFilter extends Filter { - static Filter createShortCircuitFilter(BitDocIdSetFilter nonNestedDocsFilter, SearchContext searchContext, + static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext, String parentType, SortedDocValues globalValues, LongBitSet parentOrds, long numFoundParents) { - if (numFoundParents == 1) { - BytesRef id = 
globalValues.lookupOrd((int) parentOrds.nextSetBit(0)); - if (nonNestedDocsFilter != null) { - BooleanQuery bq = new BooleanQuery(); - bq.add(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))), Occur.MUST); - bq.add(nonNestedDocsFilter, Occur.MUST); - return new QueryWrapperFilter(bq); - } else { - return new QueryWrapperFilter(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)))); + BytesRefHash parentIds= null; + boolean constructed = false; + try { + parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays()); + for (long parentOrd = parentOrds.nextSetBit(0); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) { + parentIds.add(globalValues.lookupOrd((int) parentOrd)); } - } else { - BytesRefHash parentIds= null; - boolean constructed = false; - try { - parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays()); - for (long parentOrd = parentOrds.nextSetBit(0); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) { - parentIds.add(globalValues.lookupOrd((int) parentOrd)); - } - constructed = true; - } finally { - if (!constructed) { - Releasables.close(parentIds); - } + constructed = true; + } finally { + if (!constructed) { + Releasables.close(parentIds); } - searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION); - return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds); } + searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION); + return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds); } - static Filter createShortCircuitFilter(BitDocIdSetFilter nonNestedDocsFilter, SearchContext searchContext, + static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext, String parentType, SortedDocValues globalValues, LongHash parentIdxs, long numFoundParents) { - if (numFoundParents == 1) { - BytesRef id = globalValues.lookupOrd((int) parentIdxs.get(0)); - if (nonNestedDocsFilter != null) { - BooleanQuery bq = new BooleanQuery(); - bq.add(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))), Occur.MUST); - bq.add(nonNestedDocsFilter, Occur.MUST); - return new QueryWrapperFilter(bq); - } else { - return new QueryWrapperFilter(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id)))); + BytesRefHash parentIds = null; + boolean constructed = false; + try { + parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays()); + for (int id = 0; id < parentIdxs.size(); id++) { + parentIds.add(globalValues.lookupOrd((int) parentIdxs.get(id))); } - } else { - BytesRefHash parentIds = null; - boolean constructed = false; - try { - parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays()); - for (int id = 0; id < parentIdxs.size(); id++) { - parentIds.add(globalValues.lookupOrd((int) parentIdxs.get(id))); - } - constructed = true; - } finally { - if (!constructed) { - Releasables.close(parentIds); - } + constructed = true; + } finally { + if (!constructed) { + Releasables.close(parentIds); } - searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION); - return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds); } + searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION); + return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds); } private final BytesRef parentTypeBr; - private final BitDocIdSetFilter nonNestedDocsFilter; + private final 
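The ParentIdsFilter rewrite above drops the numFoundParents == 1 special case and keeps only the general path: copy every matched parent id into a BytesRefHash, releasing it if construction fails partway. The construct-or-release shape on its own, assuming Elasticsearch's BytesRefHash, BigArrays and Releasables exactly as used in the hunk (copyIds is an illustrative helper):

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.common.lease.Releasables;
    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.common.util.BytesRefHash;

    class ConstructOrReleaseSketch {
        static BytesRefHash copyIds(Iterable<BytesRef> ids, long expectedCount, BigArrays bigArrays) {
            BytesRefHash hash = null;
            boolean constructed = false;
            try {
                hash = new BytesRefHash(expectedCount, bigArrays);
                for (BytesRef id : ids) {
                    hash.add(id); // deduplicating insert backed by paged arrays
                }
                constructed = true;
            } finally {
                if (constructed == false) {
                    Releasables.close(hash); // do not leak pages on partial construction
                }
            }
            return hash;
        }
    }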
BitSetProducer nonNestedDocsFilter; private final BytesRefHash parentIds; - private ParentIdsFilter(String parentType, BitDocIdSetFilter nonNestedDocsFilter, BytesRefHash parentIds) { + private ParentIdsFilter(String parentType, BitSetProducer nonNestedDocsFilter, BytesRefHash parentIds) { this.nonNestedDocsFilter = nonNestedDocsFilter; this.parentTypeBr = new BytesRef(parentType); this.parentIds = parentIds; @@ -148,7 +119,7 @@ final class ParentIdsFilter extends Filter { BitSet nonNestedDocs = null; if (nonNestedDocsFilter != null) { - nonNestedDocs = nonNestedDocsFilter.getDocIdSet(context).bits(); + nonNestedDocs = nonNestedDocsFilter.getBitSet(context); } PostingsEnum docsEnum = null; diff --git a/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java b/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java index dff42416af1..141d4f1b423 100644 --- a/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/child/ParentQuery.java @@ -125,14 +125,14 @@ public class ParentQuery extends IndexCacheableQuery { IndexParentChildFieldData globalIfd = parentChildIndexFieldData.loadGlobal(searcher.getIndexReader()); if (globalIfd == null) { // No docs of the specified type exist on this shard - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } try { collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType); searcher.search(parentQuery, collector); if (collector.parentCount() == 0) { - return new BooleanQuery().createWeight(searcher, needsScores); + return new BooleanQuery.Builder().build().createWeight(searcher, needsScores); } childWeight = new ChildWeight(this, parentQuery.createWeight(searcher, needsScores), childrenFilter, collector, globalIfd); releaseCollectorResource = false; diff --git a/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java b/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java index 117914518b4..2430ac3cf7e 100644 --- a/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/geo/IndexedGeoBoundingBoxQuery.java @@ -43,18 +43,18 @@ public class IndexedGeoBoundingBoxQuery { } private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) { - BooleanQuery filter = new BooleanQuery(); + BooleanQuery.Builder filter = new BooleanQuery.Builder(); filter.setMinimumNumberShouldMatch(1); filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD); filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true), Occur.SHOULD); filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST); - return new ConstantScoreQuery(filter); + return new ConstantScoreQuery(filter.build()); } private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) { - BooleanQuery filter = new BooleanQuery(); + BooleanQuery.Builder filter = new BooleanQuery.Builder(); filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST); filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST); - return new
ConstantScoreQuery(filter); + return new ConstantScoreQuery(filter.build()); } } diff --git a/core/src/main/java/org/elasticsearch/index/search/nested/IncludeNestedDocsQuery.java b/core/src/main/java/org/elasticsearch/index/search/nested/IncludeNestedDocsQuery.java index 553685d0b56..e3631269fbe 100644 --- a/core/src/main/java/org/elasticsearch/index/search/nested/IncludeNestedDocsQuery.java +++ b/core/src/main/java/org/elasticsearch/index/search/nested/IncludeNestedDocsQuery.java @@ -19,15 +19,17 @@ package org.elasticsearch.index.search.nested; -import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; -import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Explanation; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.Bits; -import org.apache.lucene.util.BitDocIdSet; -import org.apache.lucene.util.BytesRef; import java.io.IOException; import java.util.Collection; @@ -41,7 +43,7 @@ import java.util.Set; */ public class IncludeNestedDocsQuery extends Query { - private final BitDocIdSetFilter parentFilter; + private final BitSetProducer parentFilter; private final Query parentQuery; // If we are rewritten, this is the original childQuery we @@ -52,7 +54,7 @@ public class IncludeNestedDocsQuery extends Query { private final Query origParentQuery; - public IncludeNestedDocsQuery(Query parentQuery, BitDocIdSetFilter parentFilter) { + public IncludeNestedDocsQuery(Query parentQuery, BitSetProducer parentFilter) { this.origParentQuery = parentQuery; this.parentQuery = parentQuery; this.parentFilter = parentFilter; @@ -82,9 +84,9 @@ public class IncludeNestedDocsQuery extends Query { private final Query parentQuery; private final Weight parentWeight; - private final BitDocIdSetFilter parentsFilter; + private final BitSetProducer parentsFilter; - IncludeNestedDocsWeight(Query query, Query parentQuery, Weight parentWeight, BitDocIdSetFilter parentsFilter) { + IncludeNestedDocsWeight(Query query, Query parentQuery, Weight parentWeight, BitSetProducer parentsFilter) { super(query); this.parentQuery = parentQuery; this.parentWeight = parentWeight; @@ -115,7 +117,7 @@ public class IncludeNestedDocsQuery extends Query { return null; } - BitDocIdSet parents = parentsFilter.getDocIdSet(context); + BitSet parents = parentsFilter.getBitSet(context); if (parents == null) { // No matches return null; @@ -144,10 +146,10 @@ public class IncludeNestedDocsQuery extends Query { int currentParentPointer = -1; int currentDoc = -1; - IncludeNestedDocsScorer(Weight weight, Scorer parentScorer, BitDocIdSet parentBits, int currentParentPointer) { + IncludeNestedDocsScorer(Weight weight, Scorer parentScorer, BitSet parentBits, int currentParentPointer) { super(weight); this.parentScorer = parentScorer; - this.parentBits = parentBits.bits(); + this.parentBits = parentBits; this.currentParentPointer = currentParentPointer; if (currentParentPointer == 0) { currentChildPointer = 0; diff --git a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java 
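One detail worth calling out in the IndexedGeoBoundingBoxQuery hunk above: westGeoBoundingBoxFilter sets minimumNumberShouldMatch(1) because a box whose left longitude is greater than its right longitude crosses the dateline, so a point may fall in either of two longitude ranges while the latitude range is always required. A sketch under that reading, with the three argument queries standing in for the mapper-built range queries:

    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.ConstantScoreQuery;
    import org.apache.lucene.search.Query;

    class DatelineBoxSketch {
        static Query crossingDateline(Query lonWest, Query lonEast, Query latRange) {
            BooleanQuery.Builder box = new BooleanQuery.Builder();
            box.setMinimumNumberShouldMatch(1); // at least one longitude half must match
            box.add(lonWest, Occur.SHOULD);
            box.add(lonEast, Occur.SHOULD);
            box.add(latRange, Occur.MUST);      // latitude is required either way
            return new ConstantScoreQuery(box.build());
        }
    }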
b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java index 890b70996cd..7cb42852d34 100644 --- a/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java +++ b/core/src/main/java/org/elasticsearch/index/shard/TranslogRecoveryPerformer.java @@ -19,7 +19,7 @@ package org.elasticsearch.index.shard; import org.apache.lucene.search.Query; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; @@ -34,7 +34,12 @@ import org.elasticsearch.index.aliases.IndexAliasesService; import org.elasticsearch.index.cache.IndexCache; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException; -import org.elasticsearch.index.mapper.*; +import org.elasticsearch.index.mapper.DocumentMapperForType; +import org.elasticsearch.index.mapper.MapperException; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.MapperUtils; +import org.elasticsearch.index.mapper.Mapping; +import org.elasticsearch.index.mapper.Uid; import org.elasticsearch.index.query.IndexQueryParserService; import org.elasticsearch.index.query.ParsedQuery; import org.elasticsearch.index.query.QueryParsingException; @@ -222,7 +227,7 @@ public class TranslogRecoveryPerformer { } Query aliasFilter = indexAliasesService.aliasFilter(filteringAliases); - BitDocIdSetFilter parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()) : null; + BitSetProducer parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()) : null; return new Engine.DeleteByQuery(query, source, filteringAliases, aliasFilter, parentFilter, origin, startTime, types); } diff --git a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java index 73ca113165a..06000b06949 100644 --- a/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java +++ b/core/src/main/java/org/elasticsearch/percolator/PercolatorService.java @@ -805,10 +805,10 @@ public class PercolatorService extends AbstractComponent { final Query filter; if (context.aliasFilter() != null) { - BooleanQuery booleanFilter = new BooleanQuery(); + BooleanQuery.Builder booleanFilter = new BooleanQuery.Builder(); booleanFilter.add(context.aliasFilter(), BooleanClause.Occur.MUST); booleanFilter.add(percolatorTypeFilter, BooleanClause.Occur.MUST); - filter = booleanFilter; + filter = booleanFilter.build(); } else { filter = percolatorTypeFilter; } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index 167d2d9d09a..123da5afff8 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -119,9 +119,10 @@ public class AggregationPhase implements SearchPhase { Query query = Queries.newMatchAllQuery(); Query searchFilter = context.searchFilter(context.types()); if (searchFilter != null) { - BooleanQuery filtered = new BooleanQuery(); - filtered.add(query, Occur.MUST); - filtered.add(searchFilter, Occur.FILTER); + BooleanQuery filtered = new 
BooleanQuery.Builder() + .add(query, Occur.MUST) + .add(searchFilter, Occur.FILTER) + .build(); query = filtered; } try { diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java index 9aaf1eb1cb6..8c227694bf3 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregator.java @@ -18,16 +18,16 @@ */ package org.elasticsearch.search.aggregations.bucket.nested; +import org.apache.lucene.index.IndexReaderContext; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.DocIdSet; +import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.join.BitDocIdSetFilter; -import org.apache.lucene.util.BitDocIdSet; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Weight; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; -import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.search.aggregations.AggregationExecutionException; import org.elasticsearch.search.aggregations.Aggregator; @@ -50,8 +50,8 @@ import java.util.Map; */ public class NestedAggregator extends SingleBucketAggregator { - private BitDocIdSetFilter parentFilter; - private final Filter childFilter; + private BitSetProducer parentFilter; + private final Query childFilter; private DocIdSetIterator childDocs; private BitSet parentDocs; @@ -65,13 +65,11 @@ public class NestedAggregator extends SingleBucketAggregator { public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { // Reset parentFilter, so we resolve the parentDocs for each new segment being searched this.parentFilter = null; - // In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also null here. - DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null); - if (Lucene.isEmpty(childDocIdSet)) { - childDocs = null; - } else { - childDocs = childDocIdSet.iterator(); - } + final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(ctx); + final IndexSearcher searcher = new IndexSearcher(topLevelContext); + searcher.setQueryCache(null); + final Weight weight = searcher.createNormalizedWeight(childFilter, false); + childDocs = weight.scorer(ctx); return new LeafBucketCollectorBase(sub, null) { @Override @@ -91,18 +89,16 @@ public class NestedAggregator extends SingleBucketAggregator { // Additional NOTE: Before this logic was performed in the setNextReader(...) 
method, but the assumption // that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during // aggs execution - Filter parentFilterNotCached = findClosestNestedPath(parent()); + Query parentFilterNotCached = findClosestNestedPath(parent()); if (parentFilterNotCached == null) { parentFilterNotCached = Queries.newNonNestedFilter(); } - parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached); - BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx); - if (Lucene.isEmpty(parentSet)) { + parentFilter = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilterNotCached); + parentDocs = parentFilter.getBitSet(ctx); + if (parentDocs == null) { // There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations. childDocs = null; return; - } else { - parentDocs = parentSet.bits(); } } @@ -130,7 +126,7 @@ public class NestedAggregator extends SingleBucketAggregator { return new InternalNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData()); } - private static Filter findClosestNestedPath(Aggregator parent) { + private static Query findClosestNestedPath(Aggregator parent) { for (; parent != null; parent = parent.parent()) { if (parent instanceof NestedAggregator) { return ((NestedAggregator) parent).childFilter; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java index 38c65a250fc..4010858f259 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/bucket/nested/ReverseNestedAggregator.java @@ -22,13 +22,10 @@ import com.carrotsearch.hppc.LongIntHashMap; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.DocIdSetIterator; -import org.apache.lucene.search.Filter; -import org.apache.lucene.search.join.BitDocIdSetFilter; -import org.apache.lucene.util.BitDocIdSet; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; -import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.object.ObjectMapper; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.AggregationExecutionException; @@ -52,30 +49,28 @@ import java.util.Map; */ public class ReverseNestedAggregator extends SingleBucketAggregator { - private final BitDocIdSetFilter parentFilter; + private final Query parentFilter; + private final BitSetProducer parentBitsetProducer; public ReverseNestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper, AggregationContext aggregationContext, Aggregator parent, List pipelineAggregators, Map metaData) throws IOException { super(name, factories, aggregationContext, parent, pipelineAggregators, metaData); if (objectMapper == null) { - parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()); + parentFilter = Queries.newNonNestedFilter(); } else { - parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(objectMapper.nestedTypeFilter()); + parentFilter =
objectMapper.nestedTypeFilter(); } - + parentBitsetProducer = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilter); } @Override protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { // In ES if parent is deleted, then also the children are deleted, so the child docs this agg receives // must belong to parent docs that are alive. For this reason acceptedDocs can be null here. - BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx); - final BitSet parentDocs; - if (Lucene.isEmpty(docIdSet)) { + final BitSet parentDocs = parentBitsetProducer.getBitSet(ctx); + if (parentDocs == null) { return LeafBucketCollector.NO_OP_COLLECTOR; - } else { - parentDocs = docIdSet.bits(); } final LongIntHashMap bucketOrdToLastCollectedParentDoc = new LongIntHashMap(32); return new LeafBucketCollectorBase(sub, null) { @@ -120,7 +115,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator { return new InternalReverseNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData()); } - Filter getParentFilter() { + Query getParentFilter() { return parentFilter; } diff --git a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 12bb85b7697..e8e2e0e1a03 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -25,7 +25,6 @@ import org.apache.lucene.index.ReaderUtil; import org.apache.lucene.search.DocIdSet; import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.Filter; -import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.bytes.BytesReference; @@ -192,8 +191,7 @@ public class FetchPhase implements SearchPhase { private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException { if (context.mapperService().hasNested()) { - BitDocIdSet nonNested = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()).getDocIdSet(subReaderContext); - BitSet bits = nonNested.bits(); + BitSet bits = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()).getBitSet(subReaderContext); if (!bits.get(subDocId)) { return bits.nextSetBit(subDocId); } @@ -384,8 +382,7 @@ public class FetchPhase implements SearchPhase { continue; } - BitDocIdSet parentBitSet = context.bitsetFilterCache().getBitDocIdSetFilter(parentFilter).getDocIdSet(subReaderContext); - BitSet parentBits = parentBitSet.bits(); + BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext); int offset = 0; int nextParent = parentBits.nextSetBit(currentParent); diff --git a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java index 460346c44c0..a0df6388812 100644 --- a/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java +++ b/core/src/main/java/org/elasticsearch/search/fetch/innerhits/InnerHitsContext.java @@ -24,9 +24,8 @@ import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.Term; import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import
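Both nested aggregators above now resolve their per-leaf doc sets lazily: child docs come from a Weight scorer built against a top-level, cache-disabled IndexSearcher, and parent docs come from the bitset cache as a possibly-null BitSet. A compressed sketch of the child-docs half, mirroring the NestedAggregator hunk (childFilter stands in for the nested type filter):

    import java.io.IOException;
    import org.apache.lucene.index.IndexReaderContext;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.ReaderUtil;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Weight;

    class NestedLeafSketch {
        static DocIdSetIterator childDocs(Query childFilter, LeafReaderContext ctx) throws IOException {
            IndexReaderContext topLevel = ReaderUtil.getTopLevelContext(ctx);
            IndexSearcher searcher = new IndexSearcher(topLevel);
            searcher.setQueryCache(null); // an aggregator should not populate the query cache
            Weight weight = searcher.createNormalizedWeight(childFilter, false); // scores not needed
            return weight.scorer(ctx); // null when the segment holds no child docs
        }
    }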
org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; -import org.apache.lucene.util.Bits; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; @@ -117,7 +116,7 @@ public final class InnerHitsContext { } else { rawParentFilter = parentObjectMapper.nestedTypeFilter(); } - BitDocIdSetFilter parentFilter = context.bitsetFilterCache().getBitDocIdSetFilter(rawParentFilter); + BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter); Filter childFilter = childObjectMapper.nestedTypeFilter(); Query q = Queries.filtered(query.query(), new NestedChildrenQuery(parentFilter, childFilter, hitContext)); @@ -147,12 +146,12 @@ public final class InnerHitsContext { // A filter that only emits the nested children docs of a specific nested parent doc static class NestedChildrenQuery extends Query { - private final BitDocIdSetFilter parentFilter; + private final BitSetProducer parentFilter; private final Filter childFilter; private final int docId; private final LeafReader leafReader; - NestedChildrenQuery(BitDocIdSetFilter parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) { + NestedChildrenQuery(BitSetProducer parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) { this.parentFilter = parentFilter; this.childFilter = childFilter; this.docId = hitContext.docId(); @@ -202,7 +201,7 @@ public final class InnerHitsContext { return null; } - final BitSet parents = parentFilter.getDocIdSet(context).bits(); + final BitSet parents = parentFilter.getBitSet(context); final int firstChildDocId = parents.prevSetBit(docId - 1) + 1; // A parent doc doesn't have child docs, so we can early exit here: if (firstChildDocId == docId) { @@ -293,12 +292,13 @@ public final class InnerHitsContext { return Lucene.EMPTY_TOP_DOCS; } - BooleanQuery q = new BooleanQuery(); - q.add(query.query(), Occur.MUST); - // Only include docs that have the current hit as parent - q.add(new TermQuery(new Term(field, term)), Occur.MUST); - // Only include docs that have this inner hits type - q.add(documentMapper.typeFilter(), Occur.MUST); + BooleanQuery q = new BooleanQuery.Builder() + .add(query.query(), Occur.MUST) + // Only include docs that have the current hit as parent + .add(new TermQuery(new Term(field, term)), Occur.MUST) + // Only include docs that have this inner hits type + .add(documentMapper.typeFilter(), Occur.MUST) + .build(); if (size() == 0) { final int count = context.searcher().count(q); return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0); diff --git a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java index 2d24d26bae4..435bd219509 100644 --- a/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java +++ b/core/src/main/java/org/elasticsearch/search/internal/DefaultSearchContext.java @@ -197,9 +197,10 @@ public class DefaultSearchContext extends SearchContext { q.setBoost(query().getBoost()); parsedQuery(new ParsedQuery(q, parsedQuery())); } else { - BooleanQuery filtered = new BooleanQuery(); - filtered.add(query(), Occur.MUST); - filtered.add(searchFilter, Occur.FILTER); + BooleanQuery filtered = new BooleanQuery.Builder() + .add(query(), Occur.MUST) + .add(searchFilter, Occur.FILTER) + .build(); parsedQuery(new ParsedQuery(filtered, parsedQuery())); } } @@ -216,14 +217,14 @@ public class 
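The NestedChildrenQuery change above keeps the long-standing block-join layout trick, just on the new BitSet API: nested children are indexed immediately before their parent, so the children of a given parent doc are exactly the docs after the previous parent bit. Isolated sketch:

    import org.apache.lucene.util.BitSet;

    class ChildRangeSketch {
        // Assumes parentDocId > 0, as in the hunk above (doc 0 cannot have children).
        static int firstChild(BitSet parents, int parentDocId) {
            int firstChildDocId = parents.prevSetBit(parentDocId - 1) + 1;
            return firstChildDocId; // equal to parentDocId when the parent has no children
        }
    }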
DefaultSearchContext extends SearchContext { if (filter == null && aliasFilter == null) { return null; } - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); if (filter != null) { bq.add(filter, Occur.MUST); } if (aliasFilter != null) { bq.add(aliasFilter, Occur.MUST); } - return new ConstantScoreQuery(bq); + return new ConstantScoreQuery(bq.build()); } @Override diff --git a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java index 054d33866c7..fd82ce0ef57 100644 --- a/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/core/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -187,9 +187,10 @@ public class QueryPhase implements SearchPhase { // now this gets interesting: since we sort in index-order, we can directly // skip to the desired doc and stop collecting after ${size} matches if (scrollContext.lastEmittedDoc != null) { - BooleanQuery bq = new BooleanQuery(); - bq.add(query, BooleanClause.Occur.MUST); - bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER); + BooleanQuery bq = new BooleanQuery.Builder() + .add(query, BooleanClause.Occur.MUST) + .add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER) + .build(); query = bq; } searchContext.terminateAfter(numDocs); diff --git a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java index 6f4a0dfbb4a..0916128b9f7 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java @@ -26,7 +26,7 @@ import org.apache.lucene.search.FieldComparator; import org.apache.lucene.search.Filter; import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitSet; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; @@ -182,7 +182,7 @@ public class GeoDistanceSortParser implements SortParser { final Nested nested; if (nestedHelper != null && nestedHelper.getPath() != null) { - BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()); + BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()); Filter innerDocumentsFilter; if (nestedHelper.filterFound()) { // TODO: use queries instead @@ -213,7 +213,7 @@ public class GeoDistanceSortParser implements SortParser { if (nested == null) { selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE); } else { - final BitSet rootDocs = nested.rootDocs(context).bits(); + final BitSet rootDocs = nested.rootDocs(context); final DocIdSet innerDocs = nested.innerDocs(context); selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE, rootDocs, innerDocs, context.reader().maxDoc()); } diff --git a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java index 24a5e37a0b7..e327a7a500d 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java +++ b/core/src/main/java/org/elasticsearch/search/sort/ScriptSortParser.java @@ -25,7 +25,7 @@ import 
org.apache.lucene.search.Filter; import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.Scorer; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.common.lucene.search.Queries; @@ -145,7 +145,7 @@ public class ScriptSortParser implements SortParser { // If nested_path is specified, then wrap the `fieldComparatorSource` in a `NestedFieldComparatorSource` final Nested nested; if (nestedHelper != null && nestedHelper.getPath() != null) { - BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()); + BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()); Filter innerDocumentsFilter; if (nestedHelper.filterFound()) { // TODO: use queries instead diff --git a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java index 2f4dcb37acb..c7a31929775 100644 --- a/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java +++ b/core/src/main/java/org/elasticsearch/search/sort/SortParseElement.java @@ -20,11 +20,12 @@ package org.elasticsearch.search.sort; import com.google.common.collect.ImmutableMap; + import org.apache.lucene.search.Filter; import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.Sort; import org.apache.lucene.search.SortField; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.lucene.search.Queries; @@ -251,7 +252,7 @@ public class SortParseElement implements SearchParseElement { } final Nested nested; if (nestedHelper != null && nestedHelper.getPath() != null) { - BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()); + BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()); Filter innerDocumentsFilter; if (nestedHelper.filterFound()) { // TODO: use queries instead diff --git a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java index ec66a53f1e4..7f729c75d2b 100644 --- a/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java +++ b/core/src/test/java/org/apache/lucene/queries/BlendedTermQueryTests.java @@ -97,10 +97,11 @@ public class BlendedTermQueryTests extends ESTestCase { assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); } { - BooleanQuery query = new BooleanQuery(false); + BooleanQuery.Builder query = new BooleanQuery.Builder(); + query.setDisableCoord(true); query.add(new TermQuery(new Term("firstname", "simon")), BooleanClause.Occur.SHOULD); query.add(new TermQuery(new Term("surname", "simon")), BooleanClause.Occur.SHOULD); - TopDocs search = searcher.search(query, 1); + TopDocs search = searcher.search(query.build(), 1); ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); @@ -150,16 +151,18 @@ public class BlendedTermQueryTests extends ESTestCase { 
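A last pattern worth naming, visible in the FunctionScoreQueryParser, AggregationPhase, DefaultSearchContext and QueryPhase hunks earlier: filters are now attached as BooleanClause.Occur.FILTER clauses, which must match but contribute no score, replacing the older filtered-query wrappers. A sketch of the shape QueryPhase uses for scroll resumption, where resumeAfter stands in for its MinDocQuery(lastEmittedDoc.doc + 1):

    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;

    class FilterClauseSketch {
        static Query withFilter(Query scoring, Query resumeAfter) {
            return new BooleanQuery.Builder()
                .add(scoring, BooleanClause.Occur.MUST)        // scored normally
                .add(resumeAfter, BooleanClause.Occur.FILTER)  // must match, never scored
                .build();
        }
    }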
IndexSearcher searcher = setSimilarity(newSearcher(reader)); { String[] fields = new String[]{"username", "song"}; - BooleanQuery query = new BooleanQuery(false); + BooleanQuery.Builder query = new BooleanQuery.Builder(); + query.setDisableCoord(true); query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f), BooleanClause.Occur.SHOULD); query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "fighters"), 0.1f), BooleanClause.Occur.SHOULD); query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD); - TopDocs search = searcher.search(query, 10); + TopDocs search = searcher.search(query.build(), 10); ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue()); } { - BooleanQuery query = new BooleanQuery(false); + BooleanQuery.Builder query = new BooleanQuery.Builder(); + query.setDisableCoord(true); DisjunctionMaxQuery uname = new DisjunctionMaxQuery(0.0f); uname.add(new TermQuery(new Term("username", "foo"))); uname.add(new TermQuery(new Term("song", "foo"))); @@ -173,7 +176,7 @@ public class BlendedTermQueryTests extends ESTestCase { query.add(uname, BooleanClause.Occur.SHOULD); query.add(s, BooleanClause.Occur.SHOULD); query.add(gen, BooleanClause.Occur.SHOULD); - TopDocs search = searcher.search(query, 4); + TopDocs search = searcher.search(query.build(), 4); ScoreDoc[] scoreDocs = search.scoreDocs; assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue()); diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java deleted file mode 100644 index 7888704dd89..00000000000 --- a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java +++ /dev/null @@ -1,105 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.index.cache.bitset; - -import org.apache.lucene.analysis.standard.StandardAnalyzer; -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.StringField; -import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.IndexReader; -import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.IndexWriterConfig; -import org.apache.lucene.index.LogByteSizeMergePolicy; -import org.apache.lucene.index.Term; -import org.apache.lucene.search.ConstantScoreQuery; -import org.apache.lucene.search.IndexSearcher; -import org.apache.lucene.search.QueryWrapperFilter; -import org.apache.lucene.search.TermQuery; -import org.apache.lucene.search.TopDocs; -import org.apache.lucene.search.join.BitDocIdSetFilter; -import org.apache.lucene.store.RAMDirectory; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.Index; -import org.elasticsearch.test.ESTestCase; -import org.junit.Test; - -import static org.hamcrest.Matchers.equalTo; - -/** - */ -public class BitSetFilterCacheTests extends ESTestCase { - - @Test - public void testInvalidateEntries() throws Exception { - IndexWriter writer = new IndexWriter( - new RAMDirectory(), - new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy()) - ); - Document document = new Document(); - document.add(new StringField("field", "value", Field.Store.NO)); - writer.addDocument(document); - writer.commit(); - - document = new Document(); - document.add(new StringField("field", "value", Field.Store.NO)); - writer.addDocument(document); - writer.commit(); - - document = new Document(); - document.add(new StringField("field", "value", Field.Store.NO)); - writer.addDocument(document); - writer.commit(); - - IndexReader reader = DirectoryReader.open(writer, false); - IndexSearcher searcher = new IndexSearcher(reader); - - BitsetFilterCache cache = new BitsetFilterCache(new Index("test"), Settings.EMPTY); - BitDocIdSetFilter filter = cache.getBitDocIdSetFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "value")))); - TopDocs docs = searcher.search(new ConstantScoreQuery(filter), 1); - assertThat(docs.totalHits, equalTo(3)); - - // now cached - docs = searcher.search(new ConstantScoreQuery(filter), 1); - assertThat(docs.totalHits, equalTo(3)); - // There are 3 segments - assertThat(cache.getLoadedFilters().size(), equalTo(3l)); - - writer.forceMerge(1); - reader.close(); - reader = DirectoryReader.open(writer, false); - searcher = new IndexSearcher(reader); - - docs = searcher.search(new ConstantScoreQuery(filter), 1); - assertThat(docs.totalHits, equalTo(3)); - - // now cached - docs = searcher.search(new ConstantScoreQuery(filter), 1); - assertThat(docs.totalHits, equalTo(3)); - // Only one segment now, so the size must be 1 - assertThat(cache.getLoadedFilters().size(), equalTo(1l)); - - reader.close(); - writer.close(); - // There is no reference from readers and writer to any segment in the test index, so the size in the fbs cache must be 0 - assertThat(cache.getLoadedFilters().size(), equalTo(0l)); - } - -} diff --git a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index 74891629d20..a4f000ebc86 100644 --- a/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ 
b/core/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -130,7 +130,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase { protected Nested createNested(Filter parentFilter, Filter childFilter) { BitsetFilterCache s = indexService.bitsetFilterCache(); - return new Nested(s.getBitDocIdSetFilter(parentFilter), s.getBitDocIdSetFilter(childFilter)); + return new Nested(s.getBitSetProducer(parentFilter), childFilter); } public void testEmpty() throws Exception { diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 6e3d0a59283..599f10be455 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -834,7 +834,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), boolQuery().must(termQuery("name.first", "shay1")).must(termQuery("name.first", "shay4")).mustNot(termQuery("name.first", "shay2")).should(termQuery("name.first", "shay3")))).query(); - BooleanQuery filter = new BooleanQuery(); + BooleanQuery.Builder filter = new BooleanQuery.Builder(); filter.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST); filter.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST); filter.add(new TermQuery(new Term("name.first", "shay2")), Occur.MUST_NOT); @@ -842,7 +842,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { filter.setMinimumNumberShouldMatch(1); Query expected = Queries.filtered( new TermQuery(new Term("name.first", "shay")), - filter); + filter.build()); assertEquals(expected, parsedQuery); } @@ -851,7 +851,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/bool-filter.json"); Query parsedQuery = queryParser.parse(query).query(); - BooleanQuery filter = new BooleanQuery(); + BooleanQuery.Builder filter = new BooleanQuery.Builder(); filter.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST); filter.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST); filter.add(new TermQuery(new Term("name.first", "shay2")), Occur.MUST_NOT); @@ -859,7 +859,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { filter.setMinimumNumberShouldMatch(1); Query expected = Queries.filtered( new TermQuery(new Term("name.first", "shay")), - filter); + filter.build()); assertEquals(expected, parsedQuery); } @@ -867,12 +867,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { public void testAndFilteredQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), andQuery(termQuery("name.first", "shay1"), termQuery("name.first", "shay4")))).query(); - BooleanQuery and = new BooleanQuery(); + BooleanQuery.Builder and = new BooleanQuery.Builder(); and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST); and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST); BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.add(new BooleanClause(new MatchAllDocsQuery(), 
Occur.MUST)); - builder.add(new BooleanClause(and, Occur.FILTER)); + builder.add(new MatchAllDocsQuery(), Occur.MUST); + builder.add(and.build(), Occur.FILTER); assertEquals(builder.build(), parsedQuery); } @@ -881,12 +881,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter.json"); Query parsedQuery = queryParser.parse(query).query(); - BooleanQuery and = new BooleanQuery(); + BooleanQuery.Builder and = new BooleanQuery.Builder(); and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST); and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST); Query expected = Queries.filtered( new TermQuery(new Term("name.first", "shay")), - and); + and.build()); assertEquals(expected, parsedQuery); } @@ -895,12 +895,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter-named.json"); Query parsedQuery = queryParser.parse(query).query(); - BooleanQuery and = new BooleanQuery(); + BooleanQuery.Builder and = new BooleanQuery.Builder(); and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST); and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST); Query expected = Queries.filtered( new TermQuery(new Term("name.first", "shay")), - and); + and.build()); assertEquals(expected, parsedQuery); } @@ -909,12 +909,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter2.json"); Query parsedQuery = queryParser.parse(query).query(); - BooleanQuery and = new BooleanQuery(); + BooleanQuery.Builder and = new BooleanQuery.Builder(); and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST); and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST); Query expected = Queries.filtered( new TermQuery(new Term("name.first", "shay")), - and); + and.build()); assertEquals(expected, parsedQuery); } @@ -922,12 +922,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { public void testOrFilteredQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), orQuery(termQuery("name.first", "shay1"), termQuery("name.first", "shay4")))).query(); - BooleanQuery or = new BooleanQuery(); + BooleanQuery.Builder or = new BooleanQuery.Builder(); or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD); or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD); BooleanQuery.Builder builder = new BooleanQuery.Builder(); builder.add(new MatchAllDocsQuery(), Occur.MUST); - builder.add(or, Occur.FILTER); + builder.add(or.build(), Occur.FILTER); assertEquals(builder.build(), parsedQuery); } @@ -936,12 +936,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter.json"); Query parsedQuery = queryParser.parse(query).query(); - BooleanQuery or = new BooleanQuery(); + BooleanQuery.Builder or = new BooleanQuery.Builder(); or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD); or.add(new TermQuery(new 
Term("name.first", "shay4")), Occur.SHOULD); Query expected = Queries.filtered( new TermQuery(new Term("name.first", "shay")), - or); + or.build()); assertEquals(expected, parsedQuery); } @@ -950,12 +950,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { IndexQueryParserService queryParser = queryParser(); String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter2.json"); Query parsedQuery = queryParser.parse(query).query(); - BooleanQuery or = new BooleanQuery(); + BooleanQuery.Builder or = new BooleanQuery.Builder(); or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD); or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD); Query expected = Queries.filtered( new TermQuery(new Term("name.first", "shay")), - or); + or.build()); assertEquals(expected, parsedQuery); } @@ -2520,14 +2520,14 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) { Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery); - BooleanQuery expected = new BooleanQuery(); + BooleanQuery.Builder expected = new BooleanQuery.Builder(); expected.add(new TermQuery(new Term("foobar", "banon")), Occur.SHOULD); TermQuery tq1 = new TermQuery(new Term("name.first", "banon")); tq1.setBoost(2); TermQuery tq2 = new TermQuery(new Term("name.last", "banon")); tq2.setBoost(3); expected.add(new DisjunctionMaxQuery(Arrays.asList(tq1, tq2), 0f), Occur.SHOULD); - assertEquals(expected, rewrittenQuery); + assertEquals(expected.build(), rewrittenQuery); } } diff --git a/core/src/test/java/org/elasticsearch/index/search/child/AbstractChildTestCase.java b/core/src/test/java/org/elasticsearch/index/search/child/AbstractChildTestCase.java index c7dd274796c..20bdcb43f31 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/AbstractChildTestCase.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/AbstractChildTestCase.java @@ -19,14 +19,19 @@ package org.elasticsearch.index.search.child; -import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Filter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.ScoreDoc; +import org.apache.lucene.search.TopDocs; +import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.util.BitDocIdSet; import org.apache.lucene.util.BitSet; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; -import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; @@ -130,8 +135,8 @@ public abstract class AbstractChildTestCase extends ESSingleNodeTestCase { } } - static BitDocIdSetFilter wrapWithBitSetFilter(Filter filter) { - return SearchContext.current().bitsetFilterCache().getBitDocIdSetFilter(filter); + static BitSetProducer wrapWithBitSetFilter(Filter filter) { + return SearchContext.current().bitsetFilterCache().getBitSetProducer(filter); } static Query parseQuery(QueryBuilder queryBuilder) throws IOException { diff --git 
a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java index 95b3bca7694..99a9799e0ed 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java @@ -20,13 +20,28 @@ package org.elasticsearch.index.search.child; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.ObjectObjectHashMap; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; -import org.apache.lucene.index.*; -import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReader; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.index.SlowCompositeReaderWrapper; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Filter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryUtils; +import org.apache.lucene.search.QueryWrapperFilter; +import org.apache.lucene.search.TermQuery; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; @@ -51,7 +66,11 @@ import java.util.NavigableSet; import java.util.Random; import java.util.TreeSet; -import static org.elasticsearch.index.query.QueryBuilders.*; +import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery; +import static org.elasticsearch.index.query.QueryBuilders.filteredQuery; +import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery; +import static org.elasticsearch.index.query.QueryBuilders.notQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; import static org.hamcrest.Matchers.equalTo; public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase { @@ -73,7 +92,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase { Query childQuery = new TermQuery(new Term("field", "value")); ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper(); ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType()); - BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")))); + Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))); Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter())); QueryUtils.check(query); } @@ -106,7 +125,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase { ); TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3)))); - BitDocIdSetFilter parentFilter = 
wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")))); + Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))); int shortCircuitParentDocSet = random().nextInt(5); ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper(); ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType()); diff --git a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java index d8d09fe0b9c..0337a5e2b9e 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java @@ -22,6 +22,7 @@ import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.IntHashSet; import com.carrotsearch.hppc.ObjectObjectHashMap; import com.carrotsearch.randomizedtesting.generators.RandomInts; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.DoubleField; @@ -29,7 +30,6 @@ import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.*; import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; @@ -78,7 +78,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase { ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)]; ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper(); ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType()); - BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")))); + Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))); int minChildren = random().nextInt(10); int maxChildren = scaledRandomIntBetween(minChildren, 10); Query query = new ChildrenQuery(parentChildIndexFieldData, "parent", "child", parentFilter, childQuery, scoreType, minChildren, diff --git a/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java index 71eb8214d1d..83488f2bce4 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java @@ -20,13 +20,13 @@ package org.elasticsearch.index.search.child; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.ObjectObjectHashMap; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.*; import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; @@ -73,7 +73,7 @@ public 
class ParentConstantScoreQueryTests extends AbstractChildTestCase { Query parentQuery = new TermQuery(new Term("field", "value")); ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper(); ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType()); - BitDocIdSetFilter childrenFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child")))); + Filter childrenFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child"))); Query query = new ParentConstantScoreQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter); QueryUtils.check(query); } diff --git a/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java index 57dd8af9efd..ebc56be8817 100644 --- a/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/child/ParentQueryTests.java @@ -21,13 +21,13 @@ package org.elasticsearch.index.search.child; import com.carrotsearch.hppc.FloatArrayList; import com.carrotsearch.hppc.IntIntHashMap; import com.carrotsearch.hppc.ObjectObjectHashMap; + import org.apache.lucene.analysis.MockAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.document.StringField; import org.apache.lucene.index.*; import org.apache.lucene.search.*; -import org.apache.lucene.search.join.BitDocIdSetFilter; import org.apache.lucene.store.Directory; import org.apache.lucene.util.Bits; import org.apache.lucene.util.FixedBitSet; @@ -73,7 +73,7 @@ public class ParentQueryTests extends AbstractChildTestCase { Query parentQuery = new TermQuery(new Term("field", "value")); ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper(); ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType()); - BitDocIdSetFilter childrenFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child")))); + Filter childrenFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child"))); Query query = new ParentQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter); QueryUtils.check(query); } diff --git a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java index 7d30eb5a519..36981fe4a52 100644 --- a/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java +++ b/core/src/test/java/org/elasticsearch/index/search/nested/NestedSortingTests.java @@ -322,10 +322,10 @@ public class NestedSortingTests extends AbstractFieldDataTestCase { assertThat(((BytesRef) ((FieldDoc) topDocs.scoreDocs[4]).fields[0]).utf8ToString(), equalTo("g")); - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(parentFilter, Occur.MUST_NOT); bq.add(new TermQuery(new Term("filter_1", "T")), Occur.MUST); - childFilter = new QueryWrapperFilter(bq); + childFilter = new QueryWrapperFilter(bq.build()); nestedComparatorSource = new BytesRefFieldComparatorSource(indexFieldData, null, sortMode, createNested(parentFilter, childFilter)); 
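The sort and child-query hunks above keep rebuilding the same nested scaffolding, so it is worth spelling out once. A hedged sketch; `context` stands for the search context and `childQuery` is an illustrative placeholder:

    // Parent (root) documents come from the per-segment bit set cache as a BitSetProducer;
    // child documents remain an ordinary Filter.
    BitSetProducer parentProducer = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
    Filter childFilter = new QueryWrapperFilter(childQuery);
    // The pair feeds the nested field comparator source, as in createNested() above.
    Nested nested = new Nested(parentProducer, childFilter);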
query = new ToParentBlockJoinQuery( new FilteredQuery(new MatchAllDocsQuery(), childFilter), diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java index ef6112d87df..2f8103fd9b7 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/bucket/nested/NestedAggregatorTests.java @@ -131,10 +131,10 @@ public class NestedAggregatorTests extends ESSingleNodeTestCase { // A regular search always exclude nested docs, so we use NonNestedDocsFilter.INSTANCE here (otherwise MatchAllDocsQuery would be sufficient) // We exclude root doc with uid type#2, this will trigger the bug if we don't reset the root doc when we process a new segment, because // root doc type#3 and root doc type#1 have the same segment docid - BooleanQuery bq = new BooleanQuery(); + BooleanQuery.Builder bq = new BooleanQuery.Builder(); bq.add(Queries.newNonNestedFilter(), Occur.MUST); bq.add(new TermQuery(new Term(UidFieldMapper.NAME, "type#2")), Occur.MUST_NOT); - searcher.search(new ConstantScoreQuery(bq), collector); + searcher.search(new ConstantScoreQuery(bq.build()), collector); collector.postCollection(); Nested nested = (Nested) aggs[0].buildAggregation(0); diff --git a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java index ee0b52e319f..d946f11b813 100644 --- a/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java +++ b/core/src/test/java/org/elasticsearch/search/fetch/innerhits/NestedChildrenFilterTests.java @@ -34,8 +34,8 @@ import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.QueryWrapperFilter; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TotalHitCountCollector; -import org.apache.lucene.search.join.BitDocIdSetCachingWrapperFilter; -import org.apache.lucene.search.join.BitDocIdSetFilter; +import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.search.join.QueryBitSetProducer; import org.apache.lucene.store.Directory; import org.elasticsearch.search.fetch.FetchSubPhase; import org.elasticsearch.search.fetch.innerhits.InnerHitsContext.NestedInnerHits.NestedChildrenQuery; @@ -79,11 +79,11 @@ public class NestedChildrenFilterTests extends ESTestCase { IndexSearcher searcher = new IndexSearcher(reader); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); - BitDocIdSetFilter parentFilter = new BitDocIdSetCachingWrapperFilter(new QueryWrapperFilter(new TermQuery(new Term("type", "parent")))); + BitSetProducer parentFilter = new QueryBitSetProducer(new TermQuery(new Term("type", "parent"))); Filter childFilter = new QueryWrapperFilter(new TermQuery(new Term("type", "child"))); int checkedParents = 0; for (LeafReaderContext leaf : reader.leaves()) { - DocIdSetIterator parents = parentFilter.getDocIdSet(leaf).iterator(); + DocIdSetIterator parents = new QueryWrapperFilter(new TermQuery(new Term("type", "parent"))).getDocIdSet(leaf, null).iterator(); for (int parentDoc = parents.nextDoc(); parentDoc != DocIdSetIterator.NO_MORE_DOCS ; parentDoc = parents.nextDoc()) { int expectedChildDocs = leaf.reader().document(parentDoc).getField("num_child_docs").numericValue().intValue(); hitContext.reset(null, 
leaf, parentDoc, searcher); diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 99d600752ad..098825a90b1 100644 --- a/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/core/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -96,9 +96,10 @@ public class QueryPhaseTests extends ESTestCase { Query matchAllCsq = new ConstantScoreQuery(matchAll); Query tq = new TermQuery(new Term("foo", "bar")); Query tCsq = new ConstantScoreQuery(tq); - BooleanQuery bq = new BooleanQuery(); - bq.add(matchAll, Occur.SHOULD); - bq.add(tq, Occur.MUST); + BooleanQuery bq = new BooleanQuery.Builder() + .add(matchAll, Occur.SHOULD) + .add(tq, Occur.MUST) + .build(); countTestCase(matchAll, reader, false); countTestCase(matchAllCsq, reader, false); From 216335abcf7faf4b2cb52514e7e0a07ff7bf366a Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 4 Sep 2015 10:47:30 +0200 Subject: [PATCH 10/17] Tests: @AwaitsFix on PercentilesBucketIT.testNested. --- .../search/aggregations/pipeline/PercentilesBucketIT.java | 1 + 1 file changed, 1 insertion(+) diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index 507939e6858..bc50f88e540 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -489,6 +489,7 @@ public class PercentilesBucketIT extends ESIntegTestCase { } @Test + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/13337") public void testNested() throws Exception { SearchResponse response = client() .prepareSearch("idx") From 41aa1a7a7138a15fa58d09a3cc641596dbd89647 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Wed, 2 Sep 2015 17:01:20 +0200 Subject: [PATCH 11/17] Manually synchronize listeners when iterating on them in InternalClusterInfoService --- .../elasticsearch/cluster/InternalClusterInfoService.java | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 71ac5673911..8a6a7ab68fc 100644 --- a/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/core/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -44,7 +44,11 @@ import org.elasticsearch.node.settings.NodeSettingsService; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ReceiveTimeoutTransportException; -import java.util.*; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -77,7 +81,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu private final TransportIndicesStatsAction transportIndicesStatsAction; private final ClusterService clusterService; private final ThreadPool threadPool; - private final Set listeners = Collections.synchronizedSet(new HashSet()); + private final List listeners = new CopyOnWriteArrayList<>(); @Inject public InternalClusterInfoService(Settings settings, NodeSettingsService 
nodeSettingsService, From ab2f295c1671167bc3dfbada22e94fa614bd09f0 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 4 Sep 2015 08:11:36 -0400 Subject: [PATCH 12/17] Collect and pass unparsed params to buildFactory(), replacing doParse() doParse() was supposed to allow aggs to perform extra parsing. Unfortunately, this forced the parser to carry instance-level state, which would carry-over and "corrupt" any other aggs of the same type in the same query. Instead, we are now collecting all unknown params and passing them as a Map to buildFactory(). The agg may then parse them and instantiate a factory. Each param the agg uses should be removed from the unusedParams map. After building the factory, the parser verifies that unusedParams is empty. If it is not empty, an exception is raised so the user knows they provided unknown params. Fixes #13337 --- .../bucketmetrics/BucketMetricsParser.java | 39 +++++++++------ .../bucketmetrics/avg/AvgBucketParser.java | 4 +- .../bucketmetrics/max/MaxBucketParser.java | 5 +- .../bucketmetrics/min/MinBucketParser.java | 4 +- .../percentile/PercentilesBucketParser.java | 49 ++++++++++--------- .../bucketmetrics/sum/SumBucketParser.java | 4 +- .../pipeline/PercentilesBucketIT.java | 1 - 7 files changed, 63 insertions(+), 43 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java index 80b4c981d12..f994d9314ae 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/BucketMetricsParser.java @@ -31,8 +31,11 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatter; import org.elasticsearch.search.internal.SearchContext; import java.io.IOException; +import java.text.ParseException; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; /** * A parser for parsing requests for a {@link BucketMetricsPipelineAggregator} @@ -52,12 +55,11 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser { String[] bucketsPaths = null; String format = null; GapPolicy gapPolicy = GapPolicy.SKIP; + Map leftover = new HashMap<>(5); while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); - } else if (doParse(pipelineAggregatorName, currentFieldName, token, parser, context)) { - // Do nothing as subclass has stored the state for this token } else if (token == XContentParser.Token.VALUE_STRING) { if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) { format = parser.text(); @@ -66,8 +68,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser { } else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) { gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation()); } else { - throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" - + currentFieldName + "].", parser.getTokenLocation()); + leftover.put(currentFieldName, parser.text()); } } else if (token == XContentParser.Token.START_ARRAY) { if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) { @@ -78,18 +79,16 @@ public abstract class BucketMetricsParser
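PATCH 11's move from a synchronized Set to a CopyOnWriteArrayList deserves a note: every mutation copies the backing array, so iteration needs no external lock and never throws ConcurrentModificationException. A self-contained sketch with a hypothetical Listener type:

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    class ListenerRegistry {
        interface Listener { void onClusterInfo(); } // illustrative stand-in

        private final List<Listener> listeners = new CopyOnWriteArrayList<>();

        void add(Listener l) { listeners.add(l); } // copies the backing array internally

        void fire() {
            // Iterates over a stable snapshot even if add() runs concurrently.
            for (Listener l : listeners) {
                l.onClusterInfo();
            }
        }
    }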
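The leftover-params contract PATCH 12 describes reduces to three steps: the shared parser stashes unknown keys in a map, each concrete factory consumes and removes the keys it understands, and anything still present afterwards is reported as a user error. A hedged sketch with hypothetical names (IllegalArgumentException stands in for SearchParseException):

    import java.util.HashMap;
    import java.util.Map;

    Map<String, Object> leftover = new HashMap<>();
    // ... the shared parsing loop does: leftover.put(currentFieldName, value);

    // A concrete parser takes what it needs and unsets it in one step.
    Object percents = leftover.remove("percents"); // null when the user never supplied it
    if (percents != null) {
        // validate and convert the value here
    }

    // Whatever survives was consumed by nobody: surface it instead of ignoring it.
    if (!leftover.isEmpty()) {
        throw new IllegalArgumentException("Unexpected tokens " + leftover.keySet());
    }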
implements PipelineAggregator.Parser { } bucketsPaths = paths.toArray(new String[paths.size()]); } else { - throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: [" - + currentFieldName + "].", parser.getTokenLocation()); + leftover.put(currentFieldName, parser.list()); } } else { - throw new SearchParseException(context, "Unexpected token " + token + " in [" + pipelineAggregatorName + "].", - parser.getTokenLocation()); + leftover.put(currentFieldName, parser.objectText()); } } if (bucketsPaths == null) { throw new SearchParseException(context, "Missing required field [" + BUCKETS_PATH.getPreferredName() - + "] for derivative aggregation [" + pipelineAggregatorName + "]", parser.getTokenLocation()); + + "] for aggregation [" + pipelineAggregatorName + "]", parser.getTokenLocation()); } ValueFormatter formatter = null; @@ -99,15 +98,23 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser { formatter = ValueFormatter.RAW; } - return buildFactory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter); + PipelineAggregatorFactory factory = null; + try { + factory = buildFactory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter, leftover); + } catch (ParseException exception) { + throw new SearchParseException(context, "Could not parse settings for aggregation [" + + pipelineAggregatorName + "].", null, exception); + } + + if (leftover.size() > 0) { + throw new SearchParseException(context, "Unexpected tokens " + leftover.keySet() + " in [" + pipelineAggregatorName + "].", null); + } + assert(factory != null); + + return factory; } protected abstract PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, - ValueFormatter formatter); - - protected boolean doParse(String pipelineAggregatorName, String currentFieldName, Token token, - XContentParser parser, SearchContext context) throws IOException { - return false; - } + ValueFormatter formatter, Map unparsedParams) throws ParseException; } \ No newline at end of file diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java index 01811cb1746..658284f1825 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/avg/AvgBucketParser.java @@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; +import java.util.Map; + public class AvgBucketParser extends BucketMetricsParser { @Override public String type() { @@ -32,7 +34,7 @@ public class AvgBucketParser extends BucketMetricsParser { @Override protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, - ValueFormatter formatter) { + ValueFormatter formatter, Map unparsedParams) { return new AvgBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter); } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java 
b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java index 864734b956f..683db6c7d68 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/max/MaxBucketParser.java @@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; +import java.util.Map; + public class MaxBucketParser extends BucketMetricsParser { @Override @@ -32,7 +34,8 @@ public class MaxBucketParser extends BucketMetricsParser { } @Override - protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter) { + protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, + ValueFormatter formatter, Map unparsedParams) { return new MaxBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter); } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java index 0a5aa7c1123..db7bc9b0ced 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/min/MinBucketParser.java @@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; +import java.util.Map; + public class MinBucketParser extends BucketMetricsParser { @Override @@ -32,7 +34,7 @@ public class MinBucketParser extends BucketMetricsParser { } protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, - ValueFormatter formatter) { + ValueFormatter formatter, Map unparsedParams) { return new MinBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter); }; diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java index 01a428873c2..7c9da5cbe70 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/percentile/PercentilesBucketParser.java @@ -19,17 +19,14 @@ package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile; -import com.google.common.primitives.Doubles; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; -import 
org.elasticsearch.search.internal.SearchContext; -import java.io.IOException; -import java.util.ArrayList; +import java.text.ParseException; import java.util.List; +import java.util.Map; import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy; @@ -37,7 +34,6 @@ import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPo public class PercentilesBucketParser extends BucketMetricsParser { public static final ParseField PERCENTS = new ParseField("percents"); - double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 }; @Override public String type() { @@ -46,22 +42,31 @@ public class PercentilesBucketParser extends BucketMetricsParser { @Override protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, - ValueFormatter formatter) { + ValueFormatter formatter, Map unparsedParams) throws ParseException { + + double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 }; + int counter = 0; + Object percentParam = unparsedParams.get(PERCENTS.getPreferredName()); + + if (percentParam != null) { + if (percentParam instanceof List) { + percents = new double[((List) percentParam).size()]; + for (Object p : (List) percentParam) { + if (p instanceof Double) { + percents[counter] = (Double) p; + counter += 1; + } else { + throw new ParseException("Parameter [" + PERCENTS.getPreferredName() + "] must be an array of doubles, type `" + + percentParam.getClass().getSimpleName() + "` provided instead", 0); + } + } + unparsedParams.remove(PERCENTS.getPreferredName()); + } else { + throw new ParseException("Parameter [" + PERCENTS.getPreferredName() + "] must be an array of doubles, type `" + + percentParam.getClass().getSimpleName() + "` provided instead", 0); + } + } + return new PercentilesBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter, percents); } - - @Override - protected boolean doParse(String pipelineAggregatorName, String currentFieldName, - XContentParser.Token token, XContentParser parser, SearchContext context) throws IOException { - if (context.parseFieldMatcher().match(currentFieldName, PERCENTS)) { - - List parsedPercents = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - parsedPercents.add(parser.doubleValue()); - } - percents = Doubles.toArray(parsedPercents); - return true; - } - return false; - } } diff --git a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java index c71cdcf18c6..3fad95d6e51 100644 --- a/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java +++ b/core/src/main/java/org/elasticsearch/search/aggregations/pipeline/bucketmetrics/sum/SumBucketParser.java @@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory; import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser; import org.elasticsearch.search.aggregations.support.format.ValueFormatter; +import java.util.Map; + public class SumBucketParser extends BucketMetricsParser { @Override public String type() { @@ -32,7 +34,7 @@ public class SumBucketParser extends BucketMetricsParser { @Override protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, - 
ValueFormatter formatter) { + ValueFormatter formatter, Map unparsedParams) { return new SumBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter); } } diff --git a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java index bc50f88e540..507939e6858 100644 --- a/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java +++ b/core/src/test/java/org/elasticsearch/search/aggregations/pipeline/PercentilesBucketIT.java @@ -489,7 +489,6 @@ public class PercentilesBucketIT extends ESIntegTestCase { } @Test - @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/13337") public void testNested() throws Exception { SearchResponse response = client() .prepareSearch("idx") From cb12107123969c4c6cb2104008abfc2a05192522 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 4 Sep 2015 09:08:12 -0400 Subject: [PATCH 13/17] Fix Javadoc for o.e.c.r.RoutingNodes.UnassignedIterator#remove --- .../java/org/elasticsearch/cluster/routing/RoutingNodes.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java index 0836909af8e..fc2c887cf8e 100644 --- a/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java +++ b/core/src/main/java/org/elasticsearch/cluster/routing/RoutingNodes.java @@ -630,7 +630,7 @@ public class RoutingNodes implements Iterable { /** * Unsupported operation, just there for the interface. Use {@link #removeAndIgnore()} or - * {@link #initialize(String)}. + * {@link #initialize(String, long, long)}. */ @Override public void remove() { From bd5613708e9fa93e32bf051b1d6193cab878c76f Mon Sep 17 00:00:00 2001 From: javanna Date: Thu, 3 Sep 2015 22:03:30 +0200 Subject: [PATCH 14/17] Query DSL: simple_query_string overrides boost coming from lucene SimpleQueryStringParser applies whatever boost the query holds, even if it is the default 1, to the query obtained from parsing of the query string, which might contain its own boost, for instance if it resolved to a simple query like term (single term query against a single field).
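To make the interaction concrete before the fix is stated, a hedged Java sketch; the values mirror the new testSimpleQueryStringBoost below (field boost 5, query boost 2), and the names are illustrative:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    // The query string resolved to a single term query that already carries a field boost.
    Query query = new TermQuery(new Term("body", "test"));
    query.setBoost(5f);

    float boost = 2f;                          // boost set on the simple_query_string itself
    // Buggy: query.setBoost(boost) would discard the inner 5.
    query.setBoost(boost * query.getBoost());  // combined boost is 10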
We should rather multiply the existing boost with the boost set to the query, same as we do in query_string Relates to #13272 Closes #13331 --- .../index/query/SimpleQueryStringParser.java | 2 +- .../query/SimpleIndexQueryParserTests.java | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java index a3614bef72a..fa65e51d177 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java @@ -219,7 +219,7 @@ public class SimpleQueryStringParser implements QueryParser { } if (query != null) { - query.setBoost(boost); + query.setBoost(boost * query.getBoost()); } return query; diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 599f10be455..55efbb7cf56 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -2539,6 +2539,23 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { assertThat(parsedQuery, instanceOf(BooleanQuery.class)); } + @Test + public void testSimpleQueryStringBoost() throws Exception { + IndexQueryParserService queryParser = queryParser(); + SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test"); + simpleQueryStringBuilder.field("body", 5); + Query parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query(); + assertThat(parsedQuery, instanceOf(TermQuery.class)); + assertThat(parsedQuery.getBoost(), equalTo(5f)); + + simpleQueryStringBuilder = new SimpleQueryStringBuilder("test"); + simpleQueryStringBuilder.field("body", 5); + simpleQueryStringBuilder.boost(2); + parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query(); + assertThat(parsedQuery, instanceOf(TermQuery.class)); + assertThat(parsedQuery.getBoost(), equalTo(10f)); + } + @Test public void testMatchWithFuzzyTranspositions() throws Exception { IndexQueryParserService queryParser = queryParser(); From 37c90b1047c4f22af12ef9f29d48517a1a13ec37 Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Fri, 4 Sep 2015 17:13:19 +0200 Subject: [PATCH 15/17] Tests: Add back BitSetFilterCacheTests which was lost on #13308 --- .../cache/bitset/BitSetFilterCacheTests.java | 112 ++++++++++++++++++ 1 file changed, 112 insertions(+) create mode 100644 core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java diff --git a/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java new file mode 100644 index 00000000000..380e8b1a57e --- /dev/null +++ b/core/src/test/java/org/elasticsearch/index/cache/bitset/BitSetFilterCacheTests.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.cache.bitset; + +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.StringField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.LogByteSizeMergePolicy; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.QueryWrapperFilter; +import org.apache.lucene.search.TermQuery; +import org.apache.lucene.search.join.BitSetProducer; +import org.apache.lucene.store.RAMDirectory; +import org.apache.lucene.util.BitSet; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.test.ESTestCase; +import org.junit.Test; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; + +public class BitSetFilterCacheTests extends ESTestCase { + + private static int matchCount(BitSetProducer producer, IndexReader reader) throws IOException { + int count = 0; + for (LeafReaderContext ctx : reader.leaves()) { + final BitSet bitSet = producer.getBitSet(ctx); + if (bitSet != null) { + count += bitSet.cardinality(); + } + } + return count; + } + + @Test + public void testInvalidateEntries() throws Exception { + IndexWriter writer = new IndexWriter( + new RAMDirectory(), + new IndexWriterConfig(new StandardAnalyzer()).setMergePolicy(new LogByteSizeMergePolicy()) + ); + Document document = new Document(); + document.add(new StringField("field", "value", Field.Store.NO)); + writer.addDocument(document); + writer.commit(); + + document = new Document(); + document.add(new StringField("field", "value", Field.Store.NO)); + writer.addDocument(document); + writer.commit(); + + document = new Document(); + document.add(new StringField("field", "value", Field.Store.NO)); + writer.addDocument(document); + writer.commit(); + + IndexReader reader = DirectoryReader.open(writer, false); + IndexSearcher searcher = new IndexSearcher(reader); + + BitsetFilterCache cache = new BitsetFilterCache(new Index("test"), Settings.EMPTY); + BitSetProducer filter = cache.getBitSetProducer(new QueryWrapperFilter(new TermQuery(new Term("field", "value")))); + assertThat(matchCount(filter, reader), equalTo(3)); + + // now cached + assertThat(matchCount(filter, reader), equalTo(3)); + // There are 3 segments + assertThat(cache.getLoadedFilters().size(), equalTo(3l)); + + writer.forceMerge(1); + reader.close(); + reader = DirectoryReader.open(writer, false); + searcher = new IndexSearcher(reader); + + assertThat(matchCount(filter, reader), equalTo(3)); + + // now cached + assertThat(matchCount(filter, reader), equalTo(3)); + // Only one segment now, so the size must be 1 + assertThat(cache.getLoadedFilters().size(), equalTo(1l)); + + reader.close(); + writer.close(); + // There is no reference from 
readers and writer to any segment in the test index, so the size in the fbs cache must be 0 + assertThat(cache.getLoadedFilters().size(), equalTo(0l)); + } +} From 873d69f1576aeab4138e5fafcbafc20357a9998f Mon Sep 17 00:00:00 2001 From: javanna Date: Fri, 4 Sep 2015 09:32:32 +0200 Subject: [PATCH 16/17] Query DSL: span_containing and span_within override default boost coming from lucene SpanContainingQueryParser and SpanWithinQueryParser always set the boost to the parsed lucene query, even if it is the default one. The default boost of the main query though is the boost coming from the inner little query, a value that we end up overriding all the time. We should instead set the boost to the main query only if it differs from the default, to mimic lucene's behaviour. Relates to #13272 Closes #13339 --- .../query/SpanContainingQueryParser.java | 4 +- .../index/query/SpanWithinQueryParser.java | 4 +- .../query/SimpleIndexQueryParserTests.java | 44 ++++++++++++++----- 3 files changed, 38 insertions(+), 14 deletions(-) diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java index 63e312bf384..e2dc813f69f 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java @@ -91,7 +91,9 @@ public class SpanContainingQueryParser implements QueryParser { } Query query = new SpanContainingQuery(big, little); - query.setBoost(boost); + if (boost != 1.0F) { + query.setBoost(boost); + } if (queryName != null) { parseContext.addNamedQuery(queryName, query); } diff --git a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java index 9194cbd2d0e..c801e0d76a5 100644 --- a/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java +++ b/core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java @@ -91,7 +91,9 @@ public class SpanWithinQueryParser implements QueryParser { } Query query = new SpanWithinQuery(big, little); - query.setBoost(boost); + if (boost != 1.0F) { + query.setBoost(boost); + } if (queryName != null) { parseContext.addNamedQuery(queryName, query); } diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java index 55efbb7cf56..58a2b3e3a29 100644 --- a/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java +++ b/core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java @@ -1410,12 +1410,22 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { @Test public void testSpanWithinQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); - Query expectedQuery = new SpanWithinQuery(new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))), - new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)))); - Query actualQuery = queryParser.parse(spanWithinQuery() - .big(spanTermQuery("age", 34)) - .little(spanTermQuery("age", 35))) - .query(); + SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))); + big.setBoost(2); + SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))); + little.setBoost(3); + Query expectedQuery = new SpanWithinQuery(big, little); +
SpanWithinQueryBuilder spanWithinQueryBuilder = spanWithinQuery() + .big(spanTermQuery("age", 34).boost(2)) + .little(spanTermQuery("age", 35).boost(3)); + Query actualQuery = queryParser.parse(spanWithinQueryBuilder).query(); + assertEquals(expectedQuery, actualQuery); + + float boost = randomFloat(); + expectedQuery.setBoost(boost); + spanWithinQueryBuilder.boost(boost); + actualQuery = queryParser.parse(spanWithinQueryBuilder).query(); assertEquals(expectedQuery, actualQuery); } @@ -1432,12 +1442,22 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase { @Test public void testSpanContainingQueryBuilder() throws IOException { IndexQueryParserService queryParser = queryParser(); - Query expectedQuery = new SpanContainingQuery(new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))), - new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)))); - Query actualQuery = queryParser.parse(spanContainingQuery() - .big(spanTermQuery("age", 34)) - .little(spanTermQuery("age", 35))) - .query(); + SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))); + big.setBoost(2); + SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))); + little.setBoost(3); + Query expectedQuery = new SpanContainingQuery(big, little); + + SpanContainingQueryBuilder spanContainingQueryBuilder = spanContainingQuery() + .big(spanTermQuery("age", 34).boost(2)) + .little(spanTermQuery("age", 35).boost(3)); + Query actualQuery = queryParser.parse(spanContainingQueryBuilder).query(); + assertEquals(expectedQuery, actualQuery); + + float boost = randomFloat(); + expectedQuery.setBoost(boost); + spanContainingQueryBuilder.boost(boost); + actualQuery = queryParser.parse(spanContainingQueryBuilder).query(); assertEquals(expectedQuery, actualQuery); } From b98cd5f611597e394b47fd88972751ef8c2ad08d Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 4 Sep 2015 12:13:54 -0400 Subject: [PATCH 17/17] Should be asserting JVM plugins do not have a URL --- .../elasticsearch/test/hamcrest/ElasticsearchAssertions.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java index 497322ac7ea..4cb9616c659 100644 --- a/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java +++ b/core/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchAssertions.java @@ -80,6 +80,7 @@ import java.nio.file.Path; import java.util.*; import static com.google.common.base.Predicates.isNull; +import static com.google.common.base.Predicates.notNull; import static org.elasticsearch.test.ESTestCase.*; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.CoreMatchers.equalTo; @@ -755,8 +756,8 @@ public class ElasticsearchAssertions { FluentIterable jvmUrls = FluentIterable.from(plugins.getInfos()) .filter(Predicates.and(jvmPluginPredicate, Predicates.not(sitePluginPredicate))) - .filter(isNull()) - .transform(urlFunction); + .transform(urlFunction) + .filter(notNull()); Assert.assertThat(Iterables.size(jvmUrls), is(0)); List sitePluginNames = FluentIterable.from(plugins.getInfos()).filter(sitePluginPredicate).transform(nameFunction).toList();
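Two closing illustrations. First, the conditional-boost guard from PATCH 16: a freshly built span wrapper already reports a default boost derived from its inner `little` query, so the parser should only override it when the user set one explicitly. A hedged sketch; `big`, `little` and `boost` stand for the parser's locals:

    Query query = new SpanContainingQuery(big, little);
    // Calling setBoost(boost) unconditionally would stomp the default inherited
    // from `little`; only override when a non-default boost was actually parsed.
    if (boost != 1.0F) {
        query.setBoost(boost);
    }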
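Second, the ordering bug PATCH 17 fixes: the old chain filtered the PluginInfo objects themselves with isNull() (never true for a populated list) before mapping to URLs, making the size-0 assertion vacuous. Mapping first and then keeping non-null results makes the test really assert that no JVM plugin exposes a URL. A sketch with illustrative predicate and function names:

    // Buggy order (vacuously empty): .filter(isNull()).transform(urlFunction)
    // Fixed order: extract the URLs, then keep any that are non-null; there must be none.
    FluentIterable<String> jvmUrls = FluentIterable.from(plugins.getInfos())
            .filter(jvmOnlyPredicate)       // illustrative: selects JVM (non-site) plugins
            .transform(urlFunction)         // PluginInfo -> URL string, may be null
            .filter(Predicates.notNull());
    Assert.assertThat(Iterables.size(jvmUrls), is(0));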