Merge branch 'master' into feature/query-refactoring

Conflicts:
	core/src/main/java/org/elasticsearch/index/query/AndQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/BoolQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/ExistsQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/GeoShapeQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/MissingQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/OrQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/QueryParseContext.java
	core/src/main/java/org/elasticsearch/index/query/SimpleQueryStringParser.java
	core/src/main/java/org/elasticsearch/index/query/SpanContainingQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/SpanWithinQueryParser.java
	core/src/main/java/org/elasticsearch/index/query/TermsQueryParser.java
	core/src/main/java/org/elasticsearch/search/sort/GeoDistanceSortParser.java
	core/src/test/java/org/elasticsearch/index/query/SimpleIndexQueryParserTests.java
	core/src/test/java/org/elasticsearch/index/search/child/AbstractChildTestCase.java
javanna 2015-09-04 19:21:38 +02:00 committed by Luca Cavanna
commit be3409f1db
168 changed files with 2150 additions and 2044 deletions

View File

@@ -1,95 +0,0 @@
-<component>
-    <dependencySets>
-        <!-- TODO: wtf is this file doing, is it still used? must we list all deps here? -->
-        <dependencySet>
-            <outputDirectory>/lib</outputDirectory>
-            <useTransitiveFiltering>true</useTransitiveFiltering>
-            <includes>
-                <include>org.apache.lucene:lucene*</include>
-                <include>log4j:log4j</include>
-                <include>log4j:apache-log4j-extras</include>
-                <include>net.java.dev.jna:jna</include>
-                <include>com.spatial4j:spatial4j</include>
-                <include>com.vividsolutions:jts</include>
-                <include>org.codehaus.groovy:groovy-all</include>
-                <include>com.google.guava:guava</include>
-                <include>com.carrotsearch:hppc</include>
-                <include>com.fasterxml.jackson.core:jackson-core</include>
-                <include>com.fasterxml.jackson.dataformat:jackson-dataformat-smile</include>
-                <include>com.fasterxml.jackson.dataformat:jackson-dataformat-yaml</include>
-                <include>com.fasterxml.jackson.dataformat:jackson-dataformat-cbor</include>
-                <include>joda-time:joda-time</include>
-                <include>org.joda:joda-convert</include>
-                <include>io.netty:netty</include>
-                <include>com.ning:compress-lzf</include>
-                <include>com.github.spullara.mustache.java:compiler</include>
-                <include>com.tdunning:t-digest</include>
-                <include>commons-cli:commons-cli</include>
-                <include>org.hdrhistogram:HdrHistogram</include>
-            </includes>
-        </dependencySet>
-        <dependencySet>
-            <outputDirectory>/lib</outputDirectory>
-            <useTransitiveDependencies>false</useTransitiveDependencies>
-            <includes>
-                <include>org.elasticsearch:elasticsearch</include>
-            </includes>
-        </dependencySet>
-    </dependencySets>
-    <fileSets>
-        <fileSet>
-            <directory>config</directory>
-            <outputDirectory>config</outputDirectory>
-            <includes>
-                <include>*</include>
-            </includes>
-        </fileSet>
-        <fileSet>
-            <filtered>true</filtered>
-            <directory>bin</directory>
-            <outputDirectory>bin</outputDirectory>
-            <lineEnding>dos</lineEnding>
-            <includes>
-                <include>elasticsearch.in.bat</include>
-                <include>elasticsearch.bat</include>
-                <include>plugin.bat</include>
-                <include>service.bat</include>
-            </includes>
-        </fileSet>
-        <fileSet>
-            <filtered>false</filtered>
-            <directory>bin</directory>
-            <outputDirectory>bin</outputDirectory>
-            <includes>
-                <include>*.exe</include>
-            </includes>
-        </fileSet>
-        <fileSet>
-            <filtered>true</filtered>
-            <directory>bin</directory>
-            <outputDirectory>bin</outputDirectory>
-            <fileMode>0755</fileMode>
-            <directoryMode>0755</directoryMode>
-            <lineEnding>unix</lineEnding>
-            <includes>
-                <include>elasticsearch.in.sh</include>
-                <include>elasticsearch</include>
-                <include>plugin</include>
-            </includes>
-        </fileSet>
-    </fileSets>
-    <files>
-        <file>
-            <source>README.textile</source>
-            <outputDirectory></outputDirectory>
-        </file>
-        <file>
-            <source>LICENSE.txt</source>
-            <outputDirectory></outputDirectory>
-        </file>
-        <file>
-            <source>NOTICE.txt</source>
-            <outputDirectory></outputDirectory>
-        </file>
-    </files>
-</component>

View File

@@ -27,6 +27,7 @@ import org.apache.lucene.index.Term;
 import org.apache.lucene.index.TermContext;
 import org.apache.lucene.index.TermState;
 import org.apache.lucene.search.BooleanClause;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.DisjunctionMaxQuery;
 import org.apache.lucene.search.Query;
@@ -299,7 +300,8 @@ public abstract class BlendedTermQuery extends Query {
         return new BlendedTermQuery(terms, boosts) {
             @Override
             protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
-                BooleanQuery query = new BooleanQuery(disableCoord);
+                BooleanQuery.Builder query = new BooleanQuery.Builder();
+                query.setDisableCoord(disableCoord);
                 for (int i = 0; i < terms.length; i++) {
                     TermQuery termQuery = new TermQuery(terms[i], ctx[i]);
                     if (boosts != null) {
@@ -307,7 +309,7 @@ public abstract class BlendedTermQuery extends Query {
                     }
                     query.add(termQuery, BooleanClause.Occur.SHOULD);
                 }
-                return query;
+                return query.build();
             }
         };
     }
@@ -316,9 +318,10 @@ public abstract class BlendedTermQuery extends Query {
         return new BlendedTermQuery(terms, boosts) {
             @Override
             protected Query topLevelQuery(Term[] terms, TermContext[] ctx, int[] docFreqs, int maxDoc) {
-                BooleanQuery query = new BooleanQuery(true);
-                BooleanQuery high = new BooleanQuery(disableCoord);
-                BooleanQuery low = new BooleanQuery(disableCoord);
+                BooleanQuery.Builder highBuilder = new BooleanQuery.Builder();
+                highBuilder.setDisableCoord(disableCoord);
+                BooleanQuery.Builder lowBuilder = new BooleanQuery.Builder();
+                lowBuilder.setDisableCoord(disableCoord);
                 for (int i = 0; i < terms.length; i++) {
                     TermQuery termQuery = new TermQuery(terms[i], ctx[i]);
                     if (boosts != null) {
@@ -327,22 +330,28 @@ public abstract class BlendedTermQuery extends Query {
                     if ((maxTermFrequency >= 1f && docFreqs[i] > maxTermFrequency)
                             || (docFreqs[i] > (int) Math.ceil(maxTermFrequency
                             * (float) maxDoc))) {
-                        high.add(termQuery, BooleanClause.Occur.SHOULD);
+                        highBuilder.add(termQuery, BooleanClause.Occur.SHOULD);
                     } else {
-                        low.add(termQuery, BooleanClause.Occur.SHOULD);
+                        lowBuilder.add(termQuery, BooleanClause.Occur.SHOULD);
                     }
                 }
+                BooleanQuery high = highBuilder.build();
+                BooleanQuery low = lowBuilder.build();
                 if (low.clauses().isEmpty()) {
+                    BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
+                    queryBuilder.setDisableCoord(disableCoord);
                     for (BooleanClause booleanClause : high) {
-                        booleanClause.setOccur(BooleanClause.Occur.MUST);
+                        queryBuilder.add(booleanClause.getQuery(), Occur.MUST);
                     }
-                    return high;
+                    return queryBuilder.build();
                 } else if (high.clauses().isEmpty()) {
                     return low;
                 } else {
-                    query.add(high, BooleanClause.Occur.SHOULD);
-                    query.add(low, BooleanClause.Occur.MUST);
-                    return query;
+                    return new BooleanQuery.Builder()
+                            .setDisableCoord(true)
+                            .add(high, BooleanClause.Occur.SHOULD)
+                            .add(low, BooleanClause.Occur.MUST)
+                            .build();
                 }
             }
         };

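Note: the hunk above is the pattern this merge applies throughout: Lucene 5.3 deprecates the mutable BooleanQuery in favor of BooleanQuery.Builder, so clauses are accumulated on a builder and the query is immutable once built. A minimal sketch of the idiom, assuming Lucene 5.3+ on the classpath (the class and field names here are illustrative only):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    class BooleanBuilderSketch {
        // Before (Lucene <= 5.2): BooleanQuery q = new BooleanQuery(); q.add(...); return q;
        // After (Lucene 5.3+): accumulate clauses on a builder, then build an immutable query.
        static Query activeUserQuery(String user) {
            return new BooleanQuery.Builder()
                    .add(new TermQuery(new Term("user", user)), Occur.MUST)
                    .add(new TermQuery(new Term("deleted", "true")), Occur.MUST_NOT)
                    .build();
        }
    }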
View File

@@ -104,12 +104,13 @@ public class CustomFieldQuery extends FieldQuery {
          * It seems expensive but most queries will pretty small.
          */
         if (currentPos == terms.size()) {
-            PhraseQuery query = new PhraseQuery();
-            query.setBoost(orig.getBoost());
-            query.setSlop(orig.getSlop());
+            PhraseQuery.Builder queryBuilder = new PhraseQuery.Builder();
+            queryBuilder.setSlop(orig.getSlop());
             for (int i = 0; i < termsIdx.length; i++) {
-                query.add(terms.get(i)[termsIdx[i]], pos[i]);
+                queryBuilder.add(terms.get(i)[termsIdx[i]], pos[i]);
             }
+            PhraseQuery query = queryBuilder.build();
+            query.setBoost(orig.getBoost());
             this.flatten(query, reader, flatQueries);
         } else {
             Term[] t = terms.get(currentPos);

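Note: PhraseQuery gets the same treatment as BooleanQuery in Lucene 5.3: terms, positions, and slop go through PhraseQuery.Builder, while the boost is still set on the built query (as the diff above does). A hedged sketch assuming Lucene 5.3+, with illustrative field and term values:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.PhraseQuery;

    class PhraseBuilderSketch {
        static PhraseQuery sketch() {
            PhraseQuery.Builder builder = new PhraseQuery.Builder();
            builder.setSlop(2);                          // tolerate small gaps between terms
            builder.add(new Term("body", "quick"), 0);   // explicit positions, as in the diff
            builder.add(new Term("body", "fox"), 2);
            return builder.build();
        }
    }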
View File

@@ -260,7 +260,7 @@ public class Version {
     public static final int V_2_1_0_ID = 2010099;
     public static final Version V_2_1_0 = new Version(V_2_1_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0);
     public static final int V_3_0_0_ID = 3000099;
-    public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_3_0);
+    public static final Version V_3_0_0 = new Version(V_3_0_0_ID, true, org.apache.lucene.util.Version.LUCENE_5_4_0);
     public static final Version CURRENT = V_3_0_0;

     static {

View File

@@ -44,7 +44,11 @@ import org.elasticsearch.node.settings.NodeSettingsService;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.ReceiveTimeoutTransportException;

-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
@@ -77,7 +81,7 @@ public class InternalClusterInfoService extends AbstractComponent implements Clu
     private final TransportIndicesStatsAction transportIndicesStatsAction;
     private final ClusterService clusterService;
     private final ThreadPool threadPool;
-    private final Set<Listener> listeners = Collections.synchronizedSet(new HashSet<Listener>());
+    private final List<Listener> listeners = new CopyOnWriteArrayList<>();

     @Inject
     public InternalClusterInfoService(Settings settings, NodeSettingsService nodeSettingsService,

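Note: swapping the synchronized Set for a CopyOnWriteArrayList makes listener notification lock-free, since its iterators work on an immutable snapshot of the array. A sketch of why this matters (the Listener interface and method names here are illustrative):

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    class ListenerListSketch {
        interface Listener {
            void onNewInfo(String info);
        }

        private final List<Listener> listeners = new CopyOnWriteArrayList<>();

        void addListener(Listener listener) {
            listeners.add(listener);
        }

        void publish(String info) {
            // Iterates over an immutable snapshot: no lock is held, and concurrent
            // addListener() calls cannot throw ConcurrentModificationException.
            for (Listener listener : listeners) {
                listener.onNewInfo(info);
            }
        }
    }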
View File

@@ -630,7 +630,7 @@ public class RoutingNodes implements Iterable<RoutingNode> {

         /**
          * Unsupported operation, just there for the interface. Use {@link #removeAndIgnore()} or
-         * {@link #initialize(String)}.
+         * {@link #initialize(String, long, long)}.
          */
         @Override
         public void remove() {

View File

@@ -163,11 +163,11 @@ public class MoreLikeThisQuery extends Query {
     }

     private Query createQuery(XMoreLikeThis mlt) throws IOException {
-        BooleanQuery bq = new BooleanQuery();
+        BooleanQuery.Builder bqBuilder = new BooleanQuery.Builder();
         if (this.likeFields != null) {
             Query mltQuery = mlt.like(this.likeFields);
             mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch);
-            bq.add(mltQuery, BooleanClause.Occur.SHOULD);
+            bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD);
         }
         if (this.likeText != null) {
             Reader[] readers = new Reader[likeText.length];
@@ -177,9 +177,10 @@ public class MoreLikeThisQuery extends Query {
             //LUCENE 4 UPGRADE this mapps the 3.6 behavior (only use the first field)
             Query mltQuery = mlt.like(moreLikeFields[0], readers);
             mltQuery = Queries.applyMinimumShouldMatch((BooleanQuery) mltQuery, minimumShouldMatch);
-            bq.add(mltQuery, BooleanClause.Occur.SHOULD);
+            bqBuilder.add(mltQuery, BooleanClause.Occur.SHOULD);
         }

+        BooleanQuery bq = bqBuilder.build();
         bq.setBoost(getBoost());
         return bq;
     }

View File

@@ -40,7 +40,7 @@ public class Queries {

     /** Return a query that matches no document. */
     public static Query newMatchNoDocsQuery() {
-        return new BooleanQuery();
+        return new BooleanQuery.Builder().build();
     }

     public static Filter newNestedFilter() {
@@ -64,10 +64,10 @@ public class Queries {

     /** Return a query that matches all documents but those that match the given query. */
     public static Query not(Query q) {
-        BooleanQuery bq = new BooleanQuery();
-        bq.add(new MatchAllDocsQuery(), Occur.MUST);
-        bq.add(q, Occur.MUST_NOT);
-        return bq;
+        return new BooleanQuery.Builder()
+            .add(new MatchAllDocsQuery(), Occur.MUST)
+            .add(q, Occur.MUST_NOT)
+            .build();
     }

     public static boolean isNegativeQuery(Query q) {
@@ -86,9 +86,14 @@ public class Queries {

     public static Query fixNegativeQueryIfNeeded(Query q) {
         if (isNegativeQuery(q)) {
-            BooleanQuery newBq = (BooleanQuery) q.clone();
-            newBq.add(newMatchAllQuery(), BooleanClause.Occur.MUST);
-            return newBq;
+            BooleanQuery bq = (BooleanQuery) q;
+            BooleanQuery.Builder builder = new BooleanQuery.Builder();
+            builder.setDisableCoord(bq.isCoordDisabled());
+            for (BooleanClause clause : bq) {
+                builder.add(clause);
+            }
+            builder.add(newMatchAllQuery(), BooleanClause.Occur.MUST);
+            return builder.build();
         }
         return q;
     }

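Note: because BooleanQuery is immutable once built, fixNegativeQueryIfNeeded can no longer clone() and mutate; it copies the existing clauses into a fresh builder and appends the match-all clause. The isolated idiom, assuming Lucene 5.3+ (the helper name is illustrative, and MatchAllDocsQuery stands in for the newMatchAllQuery() helper used above):

    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.MatchAllDocsQuery;

    class RebuildSketch {
        static BooleanQuery withMatchAllClause(BooleanQuery bq) {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.setDisableCoord(bq.isCoordDisabled());  // preserve the coord setting
            for (BooleanClause clause : bq) {               // BooleanQuery is Iterable<BooleanClause>
                builder.add(clause);
            }
            builder.add(new MatchAllDocsQuery(), Occur.MUST);
            return builder.build();
        }
    }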
View File

@@ -27,8 +27,6 @@ import org.elasticsearch.common.unit.TimeValue;

 import java.io.IOException;
 import java.net.InetAddress;
-import java.net.InterfaceAddress;
-import java.net.NetworkInterface;
 import java.net.UnknownHostException;
 import java.util.List;
 import java.util.concurrent.CopyOnWriteArrayList;
@@ -120,14 +118,6 @@ public class NetworkService extends AbstractComponent {
                 if (address.isMulticastAddress()) {
                     throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) + "} is invalid: multicast address");
                 }
-                // check if its broadcast: flat out mistake
-                for (NetworkInterface nic : NetworkUtils.getInterfaces()) {
-                    for (InterfaceAddress intf : nic.getInterfaceAddresses()) {
-                        if (address.equals(intf.getBroadcast())) {
-                            throw new IllegalArgumentException("bind address: {" + NetworkAddress.format(address) + "} is invalid: broadcast address");
-                        }
-                    }
-                }
             }
         }
         return addresses;
@@ -161,14 +151,6 @@ public class NetworkService extends AbstractComponent {
             if (address.isMulticastAddress()) {
                 throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is invalid: multicast address");
             }
-            // check if its broadcast: flat out mistake
-            for (NetworkInterface nic : NetworkUtils.getInterfaces()) {
-                for (InterfaceAddress intf : nic.getInterfaceAddresses()) {
-                    if (address.equals(intf.getBroadcast())) {
-                        throw new IllegalArgumentException("publish address: {" + NetworkAddress.format(address) + "} is invalid: broadcast address");
-                    }
-                }
-            }
             // wildcard address, probably set by network.host
             if (address.isAnyLocalAddress()) {
                 InetAddress old = address;

View File

@@ -73,7 +73,7 @@ public class IndexAliasesService extends AbstractIndexComponent {
             return parse(alias);
         } else {
             // we need to bench here a bit, to see maybe it makes sense to use OrFilter
-            BooleanQuery combined = new BooleanQuery();
+            BooleanQuery.Builder combined = new BooleanQuery.Builder();
             for (String aliasName : aliasNames) {
                 AliasMetaData alias = this.aliases.get(aliasName);
                 if (alias == null) {
@@ -88,7 +88,7 @@ public class IndexAliasesService extends AbstractIndexComponent {
                     return null;
                 }
             }
-            return combined;
+            return combined.build();
         }
     }
} }

View File

@@ -453,8 +453,7 @@ public class AnalysisModule extends AbstractModule {
             tokenFiltersBindings.processTokenFilter("apostrophe", ApostropheFilterFactory.class);
             tokenFiltersBindings.processTokenFilter("classic", ClassicFilterFactory.class);
-            tokenFiltersBindings.processTokenFilter("decimal_digit", DecimalDigitFilterFactory.class);
         }

         @Override

View File

@@ -0,0 +1,42 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.index.analysis;
+
+import org.apache.lucene.analysis.TokenStream;
+import org.apache.lucene.analysis.core.DecimalDigitFilter;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.index.Index;
+
+/**
+ * Factory for {@link DecimalDigitFilter}
+ */
+public final class DecimalDigitFilterFactory extends AbstractTokenFilterFactory {
+
+    @Inject
+    public DecimalDigitFilterFactory(Index index, Settings indexSettings, String name, Settings settings) {
+        super(index, indexSettings, name, settings);
+    }
+
+    @Override
+    public TokenStream create(TokenStream tokenStream) {
+        return new DecimalDigitFilter(tokenStream);
+    }
+}

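Note: DecimalDigitFilter (new in Lucene 5.3) folds Unicode decimal digits into their Latin equivalents, e.g. Arabic-Indic "٣" becomes "3". A hedged usage sketch outside Elasticsearch, assuming lucene-analyzers-common 5.3+ on the classpath:

    import org.apache.lucene.analysis.Analyzer;
    import org.apache.lucene.analysis.TokenStream;
    import org.apache.lucene.analysis.core.DecimalDigitFilter;
    import org.apache.lucene.analysis.standard.StandardTokenizer;

    class DigitFoldingAnalyzer extends Analyzer {
        @Override
        protected TokenStreamComponents createComponents(String fieldName) {
            StandardTokenizer source = new StandardTokenizer();
            TokenStream result = new DecimalDigitFilter(source);  // fold digits after tokenizing
            return new TokenStreamComponents(source, result);
        }
    }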
View File

@@ -24,14 +24,17 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.RemovalListener;
 import com.google.common.cache.RemovalNotification;

+import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReader;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitDocIdSet;
-import org.apache.lucene.util.SparseFixedBitSet;
+import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
 import org.elasticsearch.common.inject.Inject;
@@ -56,6 +59,7 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.util.HashSet;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.concurrent.Callable;
 import java.util.concurrent.CountDownLatch;
@@ -69,13 +73,13 @@ import java.util.concurrent.Executor;
  * and require that it should always be around should use this cache, otherwise the
  * {@link org.elasticsearch.index.cache.query.QueryCache} should be used instead.
  */
-public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Filter, BitsetFilterCache.Value>>, Closeable {
+public class BitsetFilterCache extends AbstractIndexComponent implements LeafReader.CoreClosedListener, RemovalListener<Object, Cache<Query, BitsetFilterCache.Value>>, Closeable {

     public static final String LOAD_RANDOM_ACCESS_FILTERS_EAGERLY = "index.load_fixed_bitset_filters_eagerly";

     private final boolean loadRandomAccessFiltersEagerly;
-    private final Cache<Object, Cache<Filter, Value>> loadedFilters;
-    private final BitDocIdSetFilterWarmer warmer;
+    private final Cache<Object, Cache<Query, Value>> loadedFilters;
+    private final BitSetProducerWarmer warmer;

     private IndexService indexService;
     private IndicesWarmer indicesWarmer;
@@ -85,7 +89,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
         super(index, indexSettings);
         this.loadRandomAccessFiltersEagerly = indexSettings.getAsBoolean(LOAD_RANDOM_ACCESS_FILTERS_EAGERLY, true);
         this.loadedFilters = CacheBuilder.newBuilder().removalListener(this).build();
-        this.warmer = new BitDocIdSetFilterWarmer();
+        this.warmer = new BitSetProducerWarmer();
     }

     @Inject(optional = true)
@@ -101,9 +105,8 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
         indicesWarmer.addListener(warmer);
     }

-    public BitDocIdSetFilter getBitDocIdSetFilter(Filter filter) {
-        assert filter != null;
-        return new BitDocIdSetFilterWrapper(filter);
+    public BitSetProducer getBitSetProducer(Query query) {
+        return new QueryWrapperBitSetProducer(query);
     }

     @Override
@@ -122,38 +125,29 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
         loadedFilters.invalidateAll();
     }

-    private BitDocIdSet getAndLoadIfNotPresent(final Filter filter, final LeafReaderContext context) throws IOException, ExecutionException {
+    private BitSet getAndLoadIfNotPresent(final Query query, final LeafReaderContext context) throws IOException, ExecutionException {
         final Object coreCacheReader = context.reader().getCoreCacheKey();
         final ShardId shardId = ShardUtils.extractShardId(context.reader());
-        Cache<Filter, Value> filterToFbs = loadedFilters.get(coreCacheReader, new Callable<Cache<Filter, Value>>() {
+        Cache<Query, Value> filterToFbs = loadedFilters.get(coreCacheReader, new Callable<Cache<Query, Value>>() {
             @Override
-            public Cache<Filter, Value> call() throws Exception {
+            public Cache<Query, Value> call() throws Exception {
                 context.reader().addCoreClosedListener(BitsetFilterCache.this);
                 return CacheBuilder.newBuilder().build();
             }
         });

-        return filterToFbs.get(filter, new Callable<Value>() {
+        return filterToFbs.get(query, new Callable<Value>() {
             @Override
             public Value call() throws Exception {
-                DocIdSet docIdSet = filter.getDocIdSet(context, null);
-                final BitDocIdSet bitSet;
-                if (docIdSet instanceof BitDocIdSet) {
-                    bitSet = (BitDocIdSet) docIdSet;
+                final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
+                final IndexSearcher searcher = new IndexSearcher(topLevelContext);
+                searcher.setQueryCache(null);
+                final Weight weight = searcher.createNormalizedWeight(query, false);
+                final DocIdSetIterator it = weight.scorer(context);
+                final BitSet bitSet;
+                if (it == null) {
+                    bitSet = null;
                 } else {
-                    BitDocIdSet.Builder builder = new BitDocIdSet.Builder(context.reader().maxDoc());
-                    if (docIdSet != null && docIdSet != DocIdSet.EMPTY) {
-                        DocIdSetIterator iterator = docIdSet.iterator();
-                        // some filters (QueryWrapperFilter) return not null or DocIdSet.EMPTY if there no matching docs
-                        if (iterator != null) {
-                            builder.or(iterator);
-                        }
-                    }
-                    BitDocIdSet bits = builder.build();
-                    // code expects this to be non-null
-                    if (bits == null) {
-                        bits = new BitDocIdSet(new SparseFixedBitSet(context.reader().maxDoc()), 0);
-                    }
-                    bitSet = bits;
+                    bitSet = BitSet.of(it, context.reader().maxDoc());
                 }

                 Value value = new Value(bitSet, shardId);
@@ -169,18 +163,18 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
     }

     @Override
-    public void onRemoval(RemovalNotification<Object, Cache<Filter, Value>> notification) {
+    public void onRemoval(RemovalNotification<Object, Cache<Query, Value>> notification) {
         Object key = notification.getKey();
         if (key == null) {
             return;
         }

-        Cache<Filter, Value> value = notification.getValue();
+        Cache<Query, Value> value = notification.getValue();
         if (value == null) {
             return;
         }

-        for (Map.Entry<Filter, Value> entry : value.asMap().entrySet()) {
+        for (Map.Entry<Query, Value> entry : value.asMap().entrySet()) {
             if (entry.getValue().shardId == null) {
                 continue;
             }
@@ -195,50 +189,50 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea

     public static final class Value {

-        final BitDocIdSet bitset;
+        final BitSet bitset;
         final ShardId shardId;

-        public Value(BitDocIdSet bitset, ShardId shardId) {
+        public Value(BitSet bitset, ShardId shardId) {
            this.bitset = bitset;
            this.shardId = shardId;
         }
     }

-    final class BitDocIdSetFilterWrapper extends BitDocIdSetFilter {
+    final class QueryWrapperBitSetProducer implements BitSetProducer {

-        final Filter filter;
+        final Query query;

-        BitDocIdSetFilterWrapper(Filter filter) {
-            this.filter = filter;
+        QueryWrapperBitSetProducer(Query query) {
+            this.query = Objects.requireNonNull(query);
         }

         @Override
-        public BitDocIdSet getDocIdSet(LeafReaderContext context) throws IOException {
+        public BitSet getBitSet(LeafReaderContext context) throws IOException {
             try {
-                return getAndLoadIfNotPresent(filter, context);
+                return getAndLoadIfNotPresent(query, context);
             } catch (ExecutionException e) {
                 throw ExceptionsHelper.convertToElastic(e);
             }
         }

         @Override
-        public String toString(String field) {
-            return "random_access(" + filter + ")";
+        public String toString() {
+            return "random_access(" + query + ")";
         }

         @Override
         public boolean equals(Object o) {
-            if (!(o instanceof BitDocIdSetFilterWrapper)) return false;
-            return this.filter.equals(((BitDocIdSetFilterWrapper) o).filter);
+            if (!(o instanceof QueryWrapperBitSetProducer)) return false;
+            return this.query.equals(((QueryWrapperBitSetProducer) o).query);
         }

         @Override
         public int hashCode() {
-            return filter.hashCode() ^ 0x1117BF26;
+            return 31 * getClass().hashCode() + query.hashCode();
         }
     }

-    final class BitDocIdSetFilterWarmer extends IndicesWarmer.Listener {
+    final class BitSetProducerWarmer extends IndicesWarmer.Listener {

         @Override
         public IndicesWarmer.TerminationHandle warmNewReaders(final IndexShard indexShard, IndexMetaData indexMetaData, IndicesWarmer.WarmerContext context, ThreadPool threadPool) {
@@ -247,7 +241,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
             }

             boolean hasNested = false;
-            final Set<Filter> warmUp = new HashSet<>();
+            final Set<Query> warmUp = new HashSet<>();
             final MapperService mapperService = indexShard.mapperService();
             for (DocumentMapper docMapper : mapperService.docMappers(false)) {
                 if (docMapper.hasNestedObjects()) {
@@ -270,7 +264,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
             final Executor executor = threadPool.executor(executor());
             final CountDownLatch latch = new CountDownLatch(context.searcher().reader().leaves().size() * warmUp.size());
             for (final LeafReaderContext ctx : context.searcher().reader().leaves()) {
-                for (final Filter filterToWarm : warmUp) {
+                for (final Query filterToWarm : warmUp) {
                     executor.execute(new Runnable() {
                         @Override
@@ -306,7 +300,7 @@ public class BitsetFilterCache extends AbstractIndexComponent implements LeafRea
     }

-    Cache<Object, Cache<Filter, Value>> getLoadedFilters() {
+    Cache<Object, Cache<Query, Value>> getLoadedFilters() {
         return loadedFilters;
     }
 }

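Note: with Filter gone, the cache can no longer ask a filter for its DocIdSet directly; it runs the query's Weight against the segment and materializes the matching docs into a BitSet. The core of the new getAndLoadIfNotPresent as a standalone sketch without the caching layer, mirrored from the Lucene 5.3 calls in the diff above:

    import java.io.IOException;

    import org.apache.lucene.index.IndexReaderContext;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.ReaderUtil;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Weight;
    import org.apache.lucene.util.BitSet;

    class BitSetFromQuerySketch {
        static BitSet bitSet(Query query, LeafReaderContext context) throws IOException {
            IndexReaderContext topLevel = ReaderUtil.getTopLevelContext(context);
            IndexSearcher searcher = new IndexSearcher(topLevel);
            searcher.setQueryCache(null);                   // this cache replaces the query cache here
            Weight weight = searcher.createNormalizedWeight(query, false);  // needsScores = false
            DocIdSetIterator it = weight.scorer(context);   // null when nothing matches in this segment
            return it == null ? null : BitSet.of(it, context.reader().maxDoc());
        }
    }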
View File

@@ -20,11 +20,22 @@
 package org.elasticsearch.index.engine;

 import com.google.common.base.Preconditions;
-import org.apache.lucene.index.*;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.FilterLeafReader;
+import org.apache.lucene.index.IndexCommit;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriter;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.SegmentCommitInfo;
+import org.apache.lucene.index.SegmentInfos;
+import org.apache.lucene.index.SegmentReader;
+import org.apache.lucene.index.Term;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.SearcherManager;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.Accountables;
 import org.elasticsearch.ExceptionsHelper;
@@ -55,7 +66,11 @@ import org.elasticsearch.index.translog.Translog;

 import java.io.Closeable;
 import java.io.IOException;
-import java.util.*;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.Condition;
@@ -911,13 +926,13 @@ public abstract class Engine implements Closeable {
         private final String[] filteringAliases;
         private final Query aliasFilter;
         private final String[] types;
-        private final BitDocIdSetFilter parentFilter;
+        private final BitSetProducer parentFilter;
         private final Operation.Origin origin;

         private final long startTime;
         private long endTime;

-        public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Query aliasFilter, BitDocIdSetFilter parentFilter, Operation.Origin origin, long startTime, String... types) {
+        public DeleteByQuery(Query query, BytesReference source, @Nullable String[] filteringAliases, @Nullable Query aliasFilter, BitSetProducer parentFilter, Operation.Origin origin, long startTime, String... types) {
             this.query = query;
             this.source = source;
             this.types = types;
@@ -952,7 +967,7 @@ public abstract class Engine implements Closeable {
             return parentFilter != null;
         }

-        public BitDocIdSetFilter parentFilter() {
+        public BitSetProducer parentFilter() {
             return parentFilter;
         }

View File

@@ -638,10 +638,10 @@ public class InternalEngine extends Engine {
         try {
             Query query = delete.query();
             if (delete.aliasFilter() != null) {
-                BooleanQuery boolQuery = new BooleanQuery();
-                boolQuery.add(query, Occur.MUST);
-                boolQuery.add(delete.aliasFilter(), Occur.FILTER);
-                query = boolQuery;
+                query = new BooleanQuery.Builder()
+                        .add(query, Occur.MUST)
+                        .add(delete.aliasFilter(), Occur.FILTER)
+                        .build();
             }
             if (delete.nested()) {
                 query = new IncludeNestedDocsQuery(query, delete.parentFilter());

View File

@@ -19,11 +19,15 @@
 package org.elasticsearch.index.fielddata;

-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.search.*;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.search.FieldComparatorSource;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.SortField;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.BytesRef;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.settings.Settings;
@@ -119,10 +123,10 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
          */
         public static class Nested {

-            private final BitDocIdSetFilter rootFilter;
+            private final BitSetProducer rootFilter;
             private final Filter innerFilter;

-            public Nested(BitDocIdSetFilter rootFilter, Filter innerFilter) {
+            public Nested(BitSetProducer rootFilter, Filter innerFilter) {
                 this.rootFilter = rootFilter;
                 this.innerFilter = innerFilter;
             }
@@ -130,8 +134,8 @@ public interface IndexFieldData<FD extends AtomicFieldData> extends IndexCompone
             /**
              * Get a {@link BitDocIdSet} that matches the root documents.
              */
-            public BitDocIdSet rootDocs(LeafReaderContext ctx) throws IOException {
-                return rootFilter.getDocIdSet(ctx);
+            public BitSet rootDocs(LeafReaderContext ctx) throws IOException {
+                return rootFilter.getBitSet(ctx);
             }

             /**

View File

@@ -94,7 +94,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
                 if (nested == null) {
                     selectedValues = sortMode.select(values);
                 } else {
-                    final BitSet rootDocs = nested.rootDocs(context).bits();
+                    final BitSet rootDocs = nested.rootDocs(context);
                     final DocIdSet innerDocs = nested.innerDocs(context);
                     selectedValues = sortMode.select(values, rootDocs, innerDocs);
                 }
@@ -124,7 +124,7 @@ public class BytesRefFieldComparatorSource extends IndexFieldData.XFieldComparat
                 if (nested == null) {
                     selectedValues = sortMode.select(values, nonNullMissingBytes);
                 } else {
-                    final BitSet rootDocs = nested.rootDocs(context).bits();
+                    final BitSet rootDocs = nested.rootDocs(context);
                     final DocIdSet innerDocs = nested.innerDocs(context);
                     selectedValues = sortMode.select(values, nonNullMissingBytes, rootDocs, innerDocs, context.reader().maxDoc());
                 }

View File

@@ -78,7 +78,7 @@ public class DoubleValuesComparatorSource extends IndexFieldData.XFieldComparato
                 if (nested == null) {
                     selectedValues = sortMode.select(values, dMissingValue);
                 } else {
-                    final BitSet rootDocs = nested.rootDocs(context).bits();
+                    final BitSet rootDocs = nested.rootDocs(context);
                     final DocIdSet innerDocs = nested.innerDocs(context);
                     selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc());
                 }

View File

@@ -70,7 +70,7 @@ public class FloatValuesComparatorSource extends IndexFieldData.XFieldComparator
                 if (nested == null) {
                     selectedValues = sortMode.select(values, dMissingValue);
                 } else {
-                    final BitSet rootDocs = nested.rootDocs(context).bits();
+                    final BitSet rootDocs = nested.rootDocs(context);
                     final DocIdSet innerDocs = nested.innerDocs(context);
                     selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc());
                 }

View File

@@ -69,7 +69,7 @@ public class LongValuesComparatorSource extends IndexFieldData.XFieldComparatorS
                 if (nested == null) {
                     selectedValues = sortMode.select(values, dMissingValue);
                 } else {
-                    final BitSet rootDocs = nested.rootDocs(context).bits();
+                    final BitSet rootDocs = nested.rootDocs(context);
                     final DocIdSet innerDocs = nested.innerDocs(context);
                     selectedValues = sortMode.select(values, dMissingValue, rootDocs, innerDocs, context.reader().maxDoc());
                 }

View File

@@ -426,10 +426,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         if (types == null || types.length == 0) {
             if (hasNested && filterPercolateType) {
-                BooleanQuery bq = new BooleanQuery();
+                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                 bq.add(percolatorType, Occur.MUST_NOT);
                 bq.add(Queries.newNonNestedFilter(), Occur.MUST);
-                return new ConstantScoreQuery(bq);
+                return new ConstantScoreQuery(bq.build());
             } else if (hasNested) {
                 return Queries.newNonNestedFilter();
             } else if (filterPercolateType) {
@@ -444,10 +444,10 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             DocumentMapper docMapper = documentMapper(types[0]);
             Query filter = docMapper != null ? docMapper.typeFilter() : new TermQuery(new Term(TypeFieldMapper.NAME, types[0]));
             if (filterPercolateType) {
-                BooleanQuery bq = new BooleanQuery();
+                BooleanQuery.Builder bq = new BooleanQuery.Builder();
                 bq.add(percolatorType, Occur.MUST_NOT);
                 bq.add(filter, Occur.MUST);
-                return new ConstantScoreQuery(bq);
+                return new ConstantScoreQuery(bq.build());
             } else {
                 return filter;
             }
@@ -474,16 +474,16 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
         }

         TermsQuery termsFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes);
         if (filterPercolateType) {
-            BooleanQuery bq = new BooleanQuery();
+            BooleanQuery.Builder bq = new BooleanQuery.Builder();
             bq.add(percolatorType, Occur.MUST_NOT);
             bq.add(termsFilter, Occur.MUST);
-            return new ConstantScoreQuery(bq);
+            return new ConstantScoreQuery(bq.build());
         } else {
             return termsFilter;
         }
     } else {
         // Current bool filter requires that at least one should clause matches, even with a must clause.
-        BooleanQuery bool = new BooleanQuery();
+        BooleanQuery.Builder bool = new BooleanQuery.Builder();
         for (String type : types) {
             DocumentMapper docMapper = documentMapper(type);
             if (docMapper == null) {
@@ -499,7 +499,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
             bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
         }

-        return new ConstantScoreQuery(bool);
+        return new ConstantScoreQuery(bool.build());
     }
 }

View File

@ -27,6 +27,7 @@ import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.NumericUtils; import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoDistance; import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoHashUtils;
@ -43,6 +44,8 @@ import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperParsingException; import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.MergeResult;
import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.core.DoubleFieldMapper; import org.elasticsearch.index.mapper.core.DoubleFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper; import org.elasticsearch.index.mapper.core.NumberFieldMapper;
@ -96,8 +99,8 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
public static final boolean ENABLE_GEOHASH_PREFIX = false; public static final boolean ENABLE_GEOHASH_PREFIX = false;
public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION; public static final int GEO_HASH_PRECISION = GeoHashUtils.PRECISION;
public static final boolean IGNORE_MALFORMED = false; public static final Explicit<Boolean> IGNORE_MALFORMED = new Explicit(false, false);
public static final boolean COERCE = false; public static final Explicit<Boolean> COERCE = new Explicit(false, false);
public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType(); public static final MappedFieldType FIELD_TYPE = new GeoPointFieldType();
@ -123,11 +126,45 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
private int geoHashPrecision = Defaults.GEO_HASH_PRECISION; private int geoHashPrecision = Defaults.GEO_HASH_PRECISION;
private Boolean ignoreMalformed;
private Boolean coerce;
public Builder(String name) { public Builder(String name) {
super(name, Defaults.FIELD_TYPE); super(name, Defaults.FIELD_TYPE);
this.builder = this; this.builder = this;
} }
public Builder ignoreMalformed(boolean ignoreMalformed) {
this.ignoreMalformed = ignoreMalformed;
return builder;
}
protected Explicit<Boolean> ignoreMalformed(BuilderContext context) {
if (ignoreMalformed != null) {
return new Explicit<>(ignoreMalformed, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.ignore_malformed", Defaults.IGNORE_MALFORMED.value()), false);
}
return Defaults.IGNORE_MALFORMED;
}
public Builder coerce(boolean coerce) {
this.coerce = coerce;
return builder;
}
protected Explicit<Boolean> coerce(BuilderContext context) {
if (coerce != null) {
return new Explicit<>(coerce, true);
}
if (context.indexSettings() != null) {
return new Explicit<>(context.indexSettings().getAsBoolean("index.mapping.coerce", Defaults.COERCE.value()), false);
}
return Defaults.COERCE;
}
@Override @Override
public GeoPointFieldType fieldType() { public GeoPointFieldType fieldType() {
return (GeoPointFieldType)fieldType; return (GeoPointFieldType)fieldType;
@ -208,7 +245,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
fieldType.setHasDocValues(false); fieldType.setHasDocValues(false);
defaultFieldType.setHasDocValues(false); defaultFieldType.setHasDocValues(false);
return new GeoPointFieldMapper(name, fieldType, defaultFieldType, context.indexSettings(), origPathType, return new GeoPointFieldMapper(name, fieldType, defaultFieldType, context.indexSettings(), origPathType,
latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context)); latMapper, lonMapper, geohashMapper, multiFieldsBuilder.build(this, context), ignoreMalformed(context), coerce(context));
} }
} }
@ -220,71 +257,58 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
parseField(builder, name, node, parserContext); parseField(builder, name, node, parserContext);
for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) { for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
Map.Entry<String, Object> entry = iterator.next(); Map.Entry<String, Object> entry = iterator.next();
String fieldName = Strings.toUnderscoreCase(entry.getKey()); String propName = Strings.toUnderscoreCase(entry.getKey());
Object fieldNode = entry.getValue(); Object propNode = entry.getValue();
if (fieldName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) { if (propName.equals("path") && parserContext.indexVersionCreated().before(Version.V_2_0_0_beta1)) {
builder.multiFieldPathType(parsePathType(name, fieldNode.toString())); builder.multiFieldPathType(parsePathType(name, propNode.toString()));
iterator.remove(); iterator.remove();
} else if (fieldName.equals("lat_lon")) { } else if (propName.equals("lat_lon")) {
builder.enableLatLon(XContentMapValues.nodeBooleanValue(fieldNode)); builder.enableLatLon(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove(); iterator.remove();
} else if (fieldName.equals("geohash")) { } else if (propName.equals("geohash")) {
builder.enableGeoHash(XContentMapValues.nodeBooleanValue(fieldNode)); builder.enableGeoHash(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove(); iterator.remove();
} else if (fieldName.equals("geohash_prefix")) { } else if (propName.equals("geohash_prefix")) {
builder.geohashPrefix(XContentMapValues.nodeBooleanValue(fieldNode)); builder.geohashPrefix(XContentMapValues.nodeBooleanValue(propNode));
if (XContentMapValues.nodeBooleanValue(fieldNode)) { if (XContentMapValues.nodeBooleanValue(propNode)) {
builder.enableGeoHash(true); builder.enableGeoHash(true);
} }
iterator.remove(); iterator.remove();
} else if (fieldName.equals("precision_step")) { } else if (propName.equals("precision_step")) {
builder.precisionStep(XContentMapValues.nodeIntegerValue(fieldNode)); builder.precisionStep(XContentMapValues.nodeIntegerValue(propNode));
iterator.remove(); iterator.remove();
} else if (fieldName.equals("geohash_precision")) { } else if (propName.equals("geohash_precision")) {
if (fieldNode instanceof Integer) { if (propNode instanceof Integer) {
builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(fieldNode)); builder.geoHashPrecision(XContentMapValues.nodeIntegerValue(propNode));
} else { } else {
builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(fieldNode.toString())); builder.geoHashPrecision(GeoUtils.geoHashLevelsForPrecision(propNode.toString()));
} }
iterator.remove(); iterator.remove();
} else if (fieldName.equals(Names.IGNORE_MALFORMED)) { } else if (propName.equals(Names.IGNORE_MALFORMED)) {
if (builder.fieldType().coerce == false) { builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode));
builder.fieldType().ignoreMalformed = XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove(); iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate")) { } else if (indexCreatedBeforeV2_0 && propName.equals("validate")) {
if (builder.fieldType().ignoreMalformed == false) { builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lon")) {
if (builder.fieldType().ignoreMalformed() == false) {
builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove(); iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("validate_lat")) { } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lon")) {
if (builder.fieldType().ignoreMalformed == false) { builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
builder.fieldType().ignoreMalformed = !XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove(); iterator.remove();
} else if (fieldName.equals(Names.COERCE)) { } else if (indexCreatedBeforeV2_0 && propName.equals("validate_lat")) {
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode); builder.ignoreMalformed(!XContentMapValues.nodeBooleanValue(propNode));
if (builder.fieldType().coerce == true) {
builder.fieldType().ignoreMalformed = true;
}
iterator.remove(); iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize")) { } else if (propName.equals(Names.COERCE)) {
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode); builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove(); iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lat")) { } else if (indexCreatedBeforeV2_0 && propName.equals("normalize")) {
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode); builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove(); iterator.remove();
} else if (indexCreatedBeforeV2_0 && fieldName.equals("normalize_lon")) { } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lat")) {
if (builder.fieldType().coerce == false) { builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
builder.fieldType().coerce = XContentMapValues.nodeBooleanValue(fieldNode);
}
iterator.remove(); iterator.remove();
} else if (parseMultiField(builder, name, parserContext, fieldName, fieldNode)) { } else if (indexCreatedBeforeV2_0 && propName.equals("normalize_lon")) {
builder.coerce(XContentMapValues.nodeBooleanValue(propNode));
iterator.remove();
} else if (parseMultiField(builder, name, parserContext, propName, propNode)) {
iterator.remove(); iterator.remove();
} }
} }
@ -300,8 +324,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
private MappedFieldType latFieldType; private MappedFieldType latFieldType;
private MappedFieldType lonFieldType; private MappedFieldType lonFieldType;
private boolean ignoreMalformed = false;
private boolean coerce = false;
public GeoPointFieldType() {} public GeoPointFieldType() {}
@ -312,8 +334,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
this.geohashPrefixEnabled = ref.geohashPrefixEnabled; this.geohashPrefixEnabled = ref.geohashPrefixEnabled;
this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified this.latFieldType = ref.latFieldType; // copying ref is ok, this can never be modified
this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified this.lonFieldType = ref.lonFieldType; // copying ref is ok, this can never be modified
this.coerce = ref.coerce;
this.ignoreMalformed = ref.ignoreMalformed;
} }
@Override @Override
@ -327,8 +347,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
GeoPointFieldType that = (GeoPointFieldType) o; GeoPointFieldType that = (GeoPointFieldType) o;
return geohashPrecision == that.geohashPrecision && return geohashPrecision == that.geohashPrecision &&
geohashPrefixEnabled == that.geohashPrefixEnabled && geohashPrefixEnabled == that.geohashPrefixEnabled &&
coerce == that.coerce &&
ignoreMalformed == that.ignoreMalformed &&
java.util.Objects.equals(geohashFieldType, that.geohashFieldType) && java.util.Objects.equals(geohashFieldType, that.geohashFieldType) &&
java.util.Objects.equals(latFieldType, that.latFieldType) && java.util.Objects.equals(latFieldType, that.latFieldType) &&
java.util.Objects.equals(lonFieldType, that.lonFieldType); java.util.Objects.equals(lonFieldType, that.lonFieldType);
@ -337,7 +355,7 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
     @Override
     public int hashCode() {
         return java.util.Objects.hash(super.hashCode(), geohashFieldType, geohashPrecision, geohashPrefixEnabled, latFieldType,
-                lonFieldType, coerce, ignoreMalformed);
+                lonFieldType);
     }

     @Override
@ -365,12 +383,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
             latFieldType().numericPrecisionStep() != other.latFieldType().numericPrecisionStep()) {
             conflicts.add("mapper [" + names().fullName() + "] has different [precision_step]");
         }
-        if (ignoreMalformed() != other.ignoreMalformed()) {
-            conflicts.add("mapper [" + names().fullName() + "] has different [ignore_malformed]");
-        }
-        if (coerce() != other.coerce()) {
-            conflicts.add("mapper [" + names().fullName() + "] has different [coerce]");
-        }
     }

     public boolean isGeohashEnabled() {
@ -414,24 +426,6 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         this.lonFieldType = lonFieldType;
     }

-    public boolean coerce() {
-        return this.coerce;
-    }
-
-    public void setCoerce(boolean coerce) {
-        checkIfFrozen();
-        this.coerce = coerce;
-    }
-
-    public boolean ignoreMalformed() {
-        return this.ignoreMalformed;
-    }
-
-    public void setIgnoreMalformed(boolean ignoreMalformed) {
-        checkIfFrozen();
-        this.ignoreMalformed = ignoreMalformed;
-    }
-
     @Override
     public GeoPoint value(Object value) {
         if (value instanceof GeoPoint) {
@ -575,14 +569,20 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
     private final StringFieldMapper geohashMapper;

+    protected Explicit<Boolean> ignoreMalformed;
+    protected Explicit<Boolean> coerce;

     public GeoPointFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings,
                                ContentPath.Type pathType, DoubleFieldMapper latMapper, DoubleFieldMapper lonMapper, StringFieldMapper geohashMapper,
-                               MultiFields multiFields) {
+                               MultiFields multiFields, Explicit<Boolean> ignoreMalformed, Explicit<Boolean> coerce) {
         super(simpleName, fieldType, defaultFieldType, indexSettings, multiFields, null);
         this.pathType = pathType;
         this.latMapper = latMapper;
         this.lonMapper = lonMapper;
         this.geohashMapper = geohashMapper;
+        this.ignoreMalformed = ignoreMalformed;
+        this.coerce = coerce;
     }

     @Override
@ -595,6 +595,30 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         return (GeoPointFieldType) super.fieldType();
     }

+    @Override
+    public void merge(Mapper mergeWith, MergeResult mergeResult) throws MergeMappingException {
+        super.merge(mergeWith, mergeResult);
+        if (!this.getClass().equals(mergeWith.getClass())) {
+            return;
+        }
+        GeoPointFieldMapper gpfmMergeWith = (GeoPointFieldMapper) mergeWith;
+        if (gpfmMergeWith.coerce.explicit()) {
+            if (coerce.explicit() && coerce.value() != gpfmMergeWith.coerce.value()) {
+                mergeResult.addConflict("mapper [" + fieldType().names().fullName() + "] has different [coerce]");
+            }
+        }
+
+        if (mergeResult.simulate() == false && mergeResult.hasConflicts() == false) {
+            if (gpfmMergeWith.ignoreMalformed.explicit()) {
+                this.ignoreMalformed = gpfmMergeWith.ignoreMalformed;
+            }
+            if (gpfmMergeWith.coerce.explicit()) {
+                this.coerce = gpfmMergeWith.coerce;
+            }
+        }
+    }
+
     @Override
     protected void parseCreateField(ParseContext context, List<Field> fields) throws IOException {
         throw new UnsupportedOperationException("Parsing is implemented in parse(), this method should NEVER be called");
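The merge rules above are asymmetric on purpose: a conflict is raised only when both mappings set coerce explicitly and disagree, and values are copied only on a real (non-simulated), conflict-free merge. Condensed into one hypothetical helper (reusing the illustrative ExplicitValue sketch from earlier; java.util.List assumed imported):

    // Hypothetical helper mirroring the merge rules above; not actual Elasticsearch API.
    static <T> ExplicitValue<T> mergeSetting(ExplicitValue<T> current, ExplicitValue<T> incoming,
                                             boolean simulate, List<String> conflicts, String name) {
        if (incoming.explicit() && current.explicit() && !incoming.value().equals(current.value())) {
            conflicts.add("has different [" + name + "]");
        }
        if (!simulate && conflicts.isEmpty() && incoming.explicit()) {
            return incoming; // an explicitly-set incoming value replaces the current one
        }
        return current;
    }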
@ -671,16 +695,18 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
     }

     private void parse(ParseContext context, GeoPoint point, String geohash) throws IOException {
-        if (fieldType().ignoreMalformed == false) {
+        boolean validPoint = false;
+        if (coerce.value() == false && ignoreMalformed.value() == false) {
             if (point.lat() > 90.0 || point.lat() < -90.0) {
                 throw new IllegalArgumentException("illegal latitude value [" + point.lat() + "] for " + name());
             }
             if (point.lon() > 180.0 || point.lon() < -180) {
                 throw new IllegalArgumentException("illegal longitude value [" + point.lon() + "] for " + name());
             }
+            validPoint = true;
         }

-        if (fieldType().coerce) {
+        if (coerce.value() == true && validPoint == false) {
             // by setting coerce to false we are assuming all geopoints are already in a valid coordinate system
             // thus this extra step can be skipped
             // LUCENE WATCH: This will be folded back into Lucene's GeoPointField
@ -747,11 +773,11 @@ public class GeoPointFieldMapper extends FieldMapper implements ArrayValueMapper
         if (fieldType().isLatLonEnabled() && (includeDefaults || fieldType().latFieldType().numericPrecisionStep() != NumericUtils.PRECISION_STEP_DEFAULT)) {
             builder.field("precision_step", fieldType().latFieldType().numericPrecisionStep());
         }
-        if (includeDefaults || fieldType().coerce != Defaults.COERCE) {
-            builder.field(Names.COERCE, fieldType().coerce);
+        if (includeDefaults || coerce.explicit()) {
+            builder.field(Names.COERCE, coerce.value());
         }
-        if (includeDefaults || fieldType().ignoreMalformed != Defaults.IGNORE_MALFORMED) {
-            builder.field(Names.IGNORE_MALFORMED, fieldType().ignoreMalformed);
+        if (includeDefaults || ignoreMalformed.explicit()) {
+            builder.field(Names.IGNORE_MALFORMED, ignoreMalformed.value());
         }
     }
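The net index-time behaviour after this hunk: the default rejects out-of-range points, ignore_malformed indexes them untouched, and coerce pulls them back into range. A self-contained sketch of that decision (simplified: the real normalization lives in GeoUtils.normalizePoint and also folds latitude across the poles):

    // Simplified sketch of the validate-or-coerce decision for a geo_point value.
    static double[] checkPoint(double lat, double lon, boolean coerce, boolean ignoreMalformed) {
        boolean inRange = lat >= -90.0 && lat <= 90.0 && lon >= -180.0 && lon <= 180.0;
        if (!inRange && !coerce && !ignoreMalformed) {
            throw new IllegalArgumentException("illegal point [" + lat + ", " + lon + "]");
        }
        if (!inRange && coerce) {
            // wrap longitude into [-180, 180); latitude folding is omitted for brevity
            lon = ((lon + 180.0) % 360.0 + 360.0) % 360.0 - 180.0;
        }
        return new double[] { lat, lon }; // with ignore_malformed the point passes through as-is
    }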

View File

@ -189,7 +189,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             return super.prefixQuery(value, method, context);
         }
         Collection<String> queryTypes = context.queryTypes();
-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder query = new BooleanQuery.Builder();
         for (String queryType : queryTypes) {
             PrefixQuery prefixQuery = new PrefixQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))));
             if (method != null) {
@ -197,7 +197,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             }
             query.add(prefixQuery, BooleanClause.Occur.SHOULD);
         }
-        return query;
+        return query.build();
     }

     @Override
@ -214,7 +214,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             }
             return regexpQuery;
         }
-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder query = new BooleanQuery.Builder();
         for (String queryType : queryTypes) {
             RegexpQuery regexpQuery = new RegexpQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(queryType, BytesRefs.toBytesRef(value))), flags, maxDeterminizedStates);
             if (method != null) {
@ -222,7 +222,7 @@ public class IdFieldMapper extends MetadataFieldMapper {
             }
             query.add(regexpQuery, BooleanClause.Occur.SHOULD);
         }
-        return query;
+        return query.build();
     }
 }
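This file shows the mechanical migration that repeats throughout the rest of this commit: Lucene 5.3 treats BooleanQuery as immutable, so clauses are accumulated on a BooleanQuery.Builder and build() produces the finished query. The shape of the change as a standalone example (field and term values invented for illustration):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.TermQuery;

    class BuilderMigrationExample {
        static BooleanQuery anyOfTwoTerms() {
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.add(new TermQuery(new Term("tag", "search")), BooleanClause.Occur.SHOULD);
            builder.add(new TermQuery(new Term("tag", "lucene")), BooleanClause.Occur.SHOULD);
            return builder.build(); // the built query is not mutated afterwards
        }
    }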

View File

@ -71,7 +71,7 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder> exte
     public final Query toQuery(QueryShardContext context) throws IOException {
         Query query = doToQuery(context);
         if (query != null) {
-            query.setBoost(boost);
+            setFinalBoost(query);
             if (queryName != null) {
                 context.addNamedQuery(queryName, query);
             }
@ -79,6 +79,20 @@ public abstract class AbstractQueryBuilder<QB extends AbstractQueryBuilder> exte
         return query;
     }

+    /**
+     * Sets the main boost on the query obtained by converting the current query into a lucene query.
+     * The default behaviour is to set the main boost, after verifying that we are not overriding any non-default boost
+     * value previously set on the lucene query. That case requires a manual decision on how to combine
+     * the main boost with the boost coming from lucene, made by overriding this method.
+     * @throws IllegalStateException if the lucene query boost has already been set
+     */
+    protected void setFinalBoost(Query query) {
+        if (query.getBoost() != AbstractQueryBuilder.DEFAULT_BOOST) {
+            throw new IllegalStateException("lucene query boost is already set, override setFinalBoost to define how to combine lucene boost with main boost");
+        }
+        query.setBoost(boost);
+    }
+
     @Override
     public final Query toFilter(QueryShardContext context) throws IOException {
         Query result = null;
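setFinalBoost centralizes the question of how the DSL-level boost interacts with a boost the generated lucene query may already carry; the overrides later in this commit pick different answers (wrapper wins, multiply, preserve inner). A toy, self-contained version of the default contract, not the actual class:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;

    class FinalBoostDemo {
        static final float DEFAULT_BOOST = 1.0f;

        // Mirrors the default rule: never silently overwrite a non-default lucene boost.
        static void setFinalBoost(Query query, float dslBoost) {
            if (query.getBoost() != DEFAULT_BOOST) {
                throw new IllegalStateException("lucene query boost is already set");
            }
            query.setBoost(dslBoost);
        }

        public static void main(String[] args) {
            Query q = new TermQuery(new Term("field", "value"));
            setFinalBoost(q, 2.0f);    // fine: the inner boost was still the default
            // setFinalBoost(q, 3.0f); // would throw: the boost is now 2.0
        }
    }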

View File

@ -19,8 +19,6 @@
 package org.elasticsearch.index.query;

-import com.google.common.collect.Lists;
-
 import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.Query;
@ -89,14 +87,15 @@ public class AndQueryBuilder extends AbstractQueryBuilder<AndQueryBuilder> {
             return null;
         }

-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
         for (QueryBuilder f : filters) {
             Query innerQuery = f.toFilter(context);
             // ignore queries that are null
             if (innerQuery != null) {
-                query.add(innerQuery, Occur.MUST);
+                queryBuilder.add(innerQuery, Occur.MUST);
             }
         }
+        BooleanQuery query = queryBuilder.build();
         if (query.clauses().isEmpty()) {
             // no inner lucene query exists, ignore upstream
             return null;

View File

@ -250,16 +250,16 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
     @Override
     protected Query doToQuery(QueryShardContext context) throws IOException {
-        BooleanQuery booleanQuery = new BooleanQuery(disableCoord);
-        addBooleanClauses(context, booleanQuery, mustClauses, BooleanClause.Occur.MUST);
-        addBooleanClauses(context, booleanQuery, mustNotClauses, BooleanClause.Occur.MUST_NOT);
-        addBooleanClauses(context, booleanQuery, shouldClauses, BooleanClause.Occur.SHOULD);
-        addBooleanClauses(context, booleanQuery, filterClauses, BooleanClause.Occur.FILTER);
+        BooleanQuery.Builder booleanQueryBuilder = new BooleanQuery.Builder();
+        booleanQueryBuilder.setDisableCoord(disableCoord);
+        addBooleanClauses(context, booleanQueryBuilder, mustClauses, BooleanClause.Occur.MUST);
+        addBooleanClauses(context, booleanQueryBuilder, mustNotClauses, BooleanClause.Occur.MUST_NOT);
+        addBooleanClauses(context, booleanQueryBuilder, shouldClauses, BooleanClause.Occur.SHOULD);
+        addBooleanClauses(context, booleanQueryBuilder, filterClauses, BooleanClause.Occur.FILTER);
+        BooleanQuery booleanQuery = booleanQueryBuilder.build();
         if (booleanQuery.clauses().isEmpty()) {
             return new MatchAllDocsQuery();
         }
         booleanQuery = Queries.applyMinimumShouldMatch(booleanQuery, minimumShouldMatch);
         return adjustPureNegative ? fixNegativeQueryIfNeeded(booleanQuery) : booleanQuery;
     }
@ -274,7 +274,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
         return validationException;
     }

-    private void addBooleanClauses(QueryShardContext context, BooleanQuery booleanQuery, List<QueryBuilder> clauses, Occur occurs) throws IOException {
+    private void addBooleanClauses(QueryShardContext context, BooleanQuery.Builder booleanQueryBuilder, List<QueryBuilder> clauses, Occur occurs) throws IOException {
         for (QueryBuilder query : clauses) {
             Query luceneQuery = null;
             switch (occurs) {
@ -292,7 +292,7 @@ public class BoolQueryBuilder extends AbstractQueryBuilder<BoolQueryBuilder> {
                 luceneQuery = query.toQuery(context);
             }
             if (luceneQuery != null) {
-                booleanQuery.add(new BooleanClause(luceneQuery, occurs));
+                booleanQueryBuilder.add(new BooleanClause(luceneQuery, occurs));
             }
         }
     }
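Since the built query can no longer be mutated, helpers such as Queries.applyMinimumShouldMatch have to return a rewritten copy instead of poking the original. A sketch of what such a rewrite does (illustrative, not the actual Queries source):

    // Illustrative rebuild-instead-of-mutate pattern for minimum_should_match.
    static BooleanQuery withMinimumShouldMatch(BooleanQuery query, int minShouldMatch) {
        BooleanQuery.Builder builder = new BooleanQuery.Builder();
        builder.setDisableCoord(query.isCoordDisabled());
        builder.setMinimumNumberShouldMatch(minShouldMatch);
        for (BooleanClause clause : query.clauses()) {
            builder.add(clause); // clauses carry over unchanged
        }
        return builder.build();
    }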

View File

@ -92,7 +92,7 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
             return Queries.newMatchNoDocsQuery();
         }

-        BooleanQuery boolFilter = new BooleanQuery();
+        BooleanQuery.Builder boolFilterBuilder = new BooleanQuery.Builder();
         for (String field : fields) {
             MappedFieldType fieldType = context.fieldMapper(field);
             Query filter = null;
@ -112,9 +112,9 @@ public class ExistsQueryBuilder extends AbstractQueryBuilder<ExistsQueryBuilder>
             if (filter == null) {
                 filter = new TermRangeQuery(field, null, null, true, true);
             }
-            boolFilter.add(filter, BooleanClause.Occur.SHOULD);
+            boolFilterBuilder.add(filter, BooleanClause.Occur.SHOULD);
         }
-        return new ConstantScoreQuery(boolFilter);
+        return new ConstantScoreQuery(boolFilterBuilder.build());
     }

     @Override

View File

@ -157,12 +157,12 @@ public class GeoShapeQueryParser extends BaseQueryParserTemp {
         if (strategy instanceof RecursivePrefixTreeStrategy && shapeRelation == ShapeRelation.DISJOINT) {
             // this strategy doesn't support disjoint anymore: but it did before, including creating lucene fieldcache (!)
             // in this case, execute disjoint as exists && !intersects
-            BooleanQuery bool = new BooleanQuery();
+            BooleanQuery.Builder bool = new BooleanQuery.Builder();
             Query exists = ExistsQueryBuilder.newFilter(context, fieldName);
             Filter intersects = strategy.makeFilter(getArgs(shape, ShapeRelation.INTERSECTS));
             bool.add(exists, BooleanClause.Occur.MUST);
             bool.add(intersects, BooleanClause.Occur.MUST_NOT);
-            query = new ConstantScoreQuery(bool);
+            query = new ConstantScoreQuery(bool.build());
         } else {
             query = strategy.makeQuery(getArgs(shape, shapeRelation));
         }

View File

@ -25,7 +25,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.search.join.JoinUtil;
 import org.apache.lucene.search.join.ScoreMode;
 import org.elasticsearch.Version;
@ -167,7 +167,7 @@ public class HasChildQueryParser extends BaseQueryParserTemp {
             throw new QueryParsingException(parseContext, "[has_child] 'max_children' is less than 'min_children'");
         }

-        BitDocIdSetFilter nonNestedDocsFilter = null;
+        BitSetProducer nonNestedDocsFilter = null;
         if (parentDocMapper.hasNestedObjects()) {
             nonNestedDocsFilter = context.bitsetFilter(Queries.newNonNestedFilter());
         }

View File

@ -180,14 +180,14 @@ public class HasParentQueryParser extends BaseQueryParserTemp {
                 parentFilter = documentMapper.typeFilter();
             }
         } else {
-            BooleanQuery parentsFilter = new BooleanQuery();
+            BooleanQuery.Builder parentsFilter = new BooleanQuery.Builder();
             for (String parentTypeStr : parentTypes) {
                 DocumentMapper documentMapper = context.mapperService().documentMapper(parentTypeStr);
                 if (documentMapper != null) {
                     parentsFilter.add(documentMapper.typeFilter(), BooleanClause.Occur.SHOULD);
                 }
             }
-            parentFilter = parentsFilter;
+            parentFilter = parentsFilter.build();
         }

         if (parentFilter == null) {

View File

@ -111,6 +111,14 @@ public class IndicesQueryBuilder extends AbstractQueryBuilder<IndicesQueryBuilde
         return noMatchQuery.toQuery(context);
     }

+    @Override
+    protected void setFinalBoost(Query query) {
+        if (boost != DEFAULT_BOOST) {
+            //if both the wrapped query and the wrapper hold a boost, the main one coming from the wrapper wins
+            query.setBoost(boost);
+        }
+    }
+
     @Override
     public QueryValidationException validate() {
         QueryValidationException validationException = null;

View File

@ -144,7 +144,7 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
         Query nullFilter = null;

         if (existence) {
-            BooleanQuery boolFilter = new BooleanQuery();
+            BooleanQuery.Builder boolFilter = new BooleanQuery.Builder();
             for (String field : fields) {
                 MappedFieldType fieldType = context.fieldMapper(field);
                 Query filter = null;
@ -167,7 +167,7 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
                 boolFilter.add(filter, BooleanClause.Occur.SHOULD);
             }

-            existenceFilter = boolFilter;
+            existenceFilter = boolFilter.build();
             existenceFilter = Queries.not(existenceFilter);;
         }
@ -183,11 +183,10 @@ public class MissingQueryBuilder extends AbstractQueryBuilder<MissingQueryBuilde
         Query filter;
         if (nullFilter != null) {
             if (existenceFilter != null) {
-                BooleanQuery combined = new BooleanQuery();
-                combined.add(existenceFilter, BooleanClause.Occur.SHOULD);
-                combined.add(nullFilter, BooleanClause.Occur.SHOULD);
-                // cache the not filter as well, so it will be faster
-                filter = combined;
+                filter = new BooleanQuery.Builder()
+                    .add(existenceFilter, BooleanClause.Occur.SHOULD)
+                    .add(nullFilter, BooleanClause.Occur.SHOULD)
+                    .build();
             } else {
                 filter = nullFilter;
             }

View File

@ -291,14 +291,14 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
             }
         }

-        BooleanQuery boolQuery = new BooleanQuery();
+        BooleanQuery.Builder boolQuery = new BooleanQuery.Builder();
         boolQuery.add(mltQuery, BooleanClause.Occur.SHOULD);

         // exclude the items from the search
         if (!include) {
             handleExclude(boolQuery, likeItems);
         }
-        return boolQuery;
+        return boolQuery.build();
     }

     return mltQuery;
@ -343,7 +343,7 @@ public class MoreLikeThisQueryParser extends BaseQueryParserTemp {
         return moreLikeFields;
     }

-    private void handleExclude(BooleanQuery boolQuery, MultiTermVectorsRequest likeItems) {
+    private void handleExclude(BooleanQuery.Builder boolQuery, MultiTermVectorsRequest likeItems) {
         // artificial docs get assigned a random id and should be disregarded
         List<BytesRef> uids = new ArrayList<>();
         for (TermVectorsRequest item : likeItems) {

View File

@ -83,20 +83,20 @@ public class OrQueryBuilder extends AbstractQueryBuilder<OrQueryBuilder> {
             // no filters provided, this should be ignored upstream
             return null;
         }

-        BooleanQuery query = new BooleanQuery();
+        BooleanQuery.Builder queryBuilder = new BooleanQuery.Builder();
         for (QueryBuilder f : filters) {
             Query innerQuery = f.toFilter(context);
             // ignore queries that are null
             if (innerQuery != null) {
-                query.add(innerQuery, Occur.SHOULD);
+                queryBuilder.add(innerQuery, Occur.SHOULD);
             }
         }
-        if (query.clauses().isEmpty()) {
+        BooleanQuery booleanQuery = queryBuilder.build();
+        if (booleanQuery.clauses().isEmpty()) {
             // no inner lucene query exists, ignore upstream
             return null;
         }
-        return query;
+        return booleanQuery;
     }

     @Override

View File

@ -26,7 +26,7 @@ import org.apache.lucene.queryparser.classic.MapperQueryParser;
 import org.apache.lucene.queryparser.classic.QueryParserSettings;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.search.similarities.Similarity;
 import org.elasticsearch.Version;
 import org.elasticsearch.cluster.metadata.IndexMetaData;
@ -163,8 +163,8 @@ public class QueryShardContext {
         return queryParser;
     }

-    public BitDocIdSetFilter bitsetFilter(Filter filter) {
-        return indexQueryParser.bitsetFilterCache.getBitDocIdSetFilter(filter);
+    public BitSetProducer bitsetFilter(Filter filter) {
+        return indexQueryParser.bitsetFilterCache.getBitSetProducer(filter);
     }

     public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType mapper) {
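BitDocIdSetFilter gives way to Lucene's BitSetProducer here and in every consumer below: instead of a filter producing a BitDocIdSet, the cache hands out a producer that returns a plain per-segment BitSet (or null when the segment has no matching docs). A minimal consumer-side sketch:

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.util.BitSet;

    class ParentBitsExample {
        static BitSet parentBits(BitSetProducer producer, LeafReaderContext leaf) throws IOException {
            return producer.getBitSet(leaf); // may be null for segments without parent docs
        }
    }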

View File

@ -57,7 +57,8 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
     @Override
     public Query newDefaultQuery(String text) {
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 Query q = createBooleanQuery(entry.getKey(), text, super.getDefaultOperator());
@ -69,7 +70,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }

     /**
@ -81,24 +82,24 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
         if (settings.lowercaseExpandedTerms()) {
             text = text.toLowerCase(settings.locale());
         }
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 Query q = new FuzzyQuery(new Term(entry.getKey(), text), fuzziness);
-                if (q != null) {
-                    q.setBoost(entry.getValue());
-                    bq.add(q, BooleanClause.Occur.SHOULD);
-                }
+                q.setBoost(entry.getValue());
+                bq.add(q, BooleanClause.Occur.SHOULD);
             } catch (RuntimeException e) {
                 rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }

     @Override
     public Query newPhraseQuery(String text, int slop) {
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 Query q = createPhraseQuery(entry.getKey(), text, slop);
@ -110,7 +111,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }

     /**
@ -122,7 +123,8 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
         if (settings.lowercaseExpandedTerms()) {
             text = text.toLowerCase(settings.locale());
         }
-        BooleanQuery bq = new BooleanQuery(true);
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
+        bq.setDisableCoord(true);
         for (Map.Entry<String,Float> entry : weights.entrySet()) {
             try {
                 if (settings.analyzeWildcard()) {
@ -138,7 +140,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 return rethrowUnlessLenient(e);
             }
         }
-        return super.simplify(bq);
+        return super.simplify(bq.build());
     }

     /**
@ -183,7 +185,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
             }
             return new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef())));
         } else {
-            BooleanQuery bq = new BooleanQuery();
+            BooleanQuery.Builder bq = new BooleanQuery.Builder();
             for (int i = 0; i < numTokens; i++) {
                 try {
                     boolean hasNext = buffer.incrementToken();
@ -193,7 +195,7 @@ public class SimpleQueryParser extends org.apache.lucene.queryparser.simple.Simp
                 }
                 bq.add(new BooleanClause(new PrefixQuery(new Term(field, BytesRef.deepCopyOf(termAtt.getBytesRef()))), BooleanClause.Occur.SHOULD));
             }
-            return bq;
+            return bq.build();
         }
     } catch (IOException e) {
         // Bail on any exceptions, going with a regular prefix query

View File

@ -311,6 +311,11 @@ public class SimpleQueryStringBuilder extends AbstractQueryBuilder<SimpleQuerySt
         return fieldName;
     }

+    @Override
+    protected void setFinalBoost(Query query) {
+        query.setBoost(boost * query.getBoost());
+    }
+
     @Override
     protected void doXContent(XContentBuilder builder, Params params) throws IOException {
         builder.startObject(NAME);
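simple_query_string multiplies instead of replacing because the lucene query it parses can already carry a meaningful boost of its own (for example a per-field weight), so the two have to compose rather than clobber each other:

    // e.g. a parser-assigned field weight of 0.5f combined with a DSL boost of 2.0f
    // yields a final boost of 1.0f, instead of 2.0f wiping out the 0.5f.
    static void combineBoosts(Query query, float dslBoost) {
        query.setBoost(dslBoost * query.getBoost());
    }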

View File

@ -19,6 +19,7 @@
 package org.elasticsearch.index.query;

+import org.apache.lucene.search.BooleanQuery;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.xcontent.XContentParser;
@ -164,7 +165,6 @@ public class SimpleQueryStringParser extends BaseQueryParser<SimpleQueryStringBu
         qb.boost(boost).fields(fieldsAndWeights).analyzer(analyzerName).queryName(queryName).minimumShouldMatch(minimumShouldMatch);
         qb.flags(flags).defaultOperator(defaultOperator).locale(locale).lowercaseExpandedTerms(lowercaseExpandedTerms);
         qb.lenient(lenient).analyzeWildcard(analyzeWildcard).boost(boost);
-
         return qb;
     }

View File

@ -82,6 +82,14 @@ public class SpanContainingQueryBuilder extends AbstractQueryBuilder<SpanContain
         return new SpanContainingQuery((SpanQuery) innerBig, (SpanQuery) innerLittle);
     }

+    @Override
+    protected void setFinalBoost(Query query) {
+        if (boost != AbstractQueryBuilder.DEFAULT_BOOST) {
+            //preserve potential inner boost coming from lucene (default is big.boost)
+            query.setBoost(boost);
+        }
+    }
+
     @Override
     public QueryValidationException validate() {
         QueryValidationException validationException = null;

View File

@ -87,6 +87,14 @@ public class SpanWithinQueryBuilder extends AbstractQueryBuilder<SpanWithinQuery
         return new SpanWithinQuery((SpanQuery) innerBig, (SpanQuery) innerLittle);
     }

+    @Override
+    protected void setFinalBoost(Query query) {
+        if (boost != AbstractQueryBuilder.DEFAULT_BOOST) {
+            //preserve potential inner boost coming from lucene (default is little.boost)
+            query.setBoost(boost);
+        }
+    }
+
     @Override
     public QueryValidationException validate() {
         QueryValidationException validationException = null;

View File

@ -356,7 +356,8 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
                 query = new TermsQuery(indexFieldName, filterValues);
             }
         } else {
-            BooleanQuery bq = new BooleanQuery(disableCoord);
+            BooleanQuery.Builder bq = new BooleanQuery.Builder();
+            bq.setDisableCoord(disableCoord);
             for (Object term : terms) {
                 if (fieldType != null) {
                     bq.add(fieldType.termQuery(term, context), BooleanClause.Occur.SHOULD);
@ -364,8 +365,7 @@ public class TermsQueryBuilder extends AbstractQueryBuilder<TermsQueryBuilder> {
                     bq.add(new TermQuery(new Term(indexFieldName, BytesRefs.toBytesRef(term))), BooleanClause.Occur.SHOULD);
                 }
             }
-            bq = Queries.applyMinimumShouldMatch(bq, minimumShouldMatch);
-            query = bq;
+            query = Queries.applyMinimumShouldMatch(bq.build(), minimumShouldMatch);
         }
         return query;
     }

View File

@ -104,6 +104,14 @@ public class WrapperQueryBuilder extends AbstractQueryBuilder<WrapperQueryBuilde
         }
     }

+    @Override
+    protected void setFinalBoost(Query query) {
+        if (boost != DEFAULT_BOOST) {
+            //if both the wrapped query and the wrapper hold a boost, the main one coming from the wrapper takes precedence
+            query.setBoost(boost);
+        }
+    }
+
     @Override
     public QueryValidationException validate() {
         QueryValidationException validationException = null;

View File

@ -158,10 +158,10 @@ public class FunctionScoreQueryParser implements QueryParser {
         } else if (query == null && filter != null) {
             query = new ConstantScoreQuery(filter);
         } else if (query != null && filter != null) {
-            final BooleanQuery filtered = new BooleanQuery();
+            final BooleanQuery.Builder filtered = new BooleanQuery.Builder();
             filtered.add(query, Occur.MUST);
             filtered.add(filter, Occur.FILTER);
-            query = filtered;
+            query = filtered.build();
         }
         // if all filter elements returned null, just use the query
         if (filterFunctions.isEmpty() && combineFunction == null) {

View File

@ -21,7 +21,7 @@ package org.elasticsearch.index.query.support;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.lucene.search.Queries;
 import org.elasticsearch.common.xcontent.XContentFactory;
@ -55,7 +55,7 @@ public class NestedInnerQueryParseSupport {
     protected boolean queryFound = false;
     protected boolean filterFound = false;

-    protected BitDocIdSetFilter parentFilter;
+    protected BitSetProducer parentFilter;
     protected Filter childFilter;

     protected ObjectMapper nestedObjectMapper;

View File

@ -132,11 +132,11 @@ public class MultiMatchQuery extends MatchQuery {
             }
             return disMaxQuery;
         } else {
-            final BooleanQuery booleanQuery = new BooleanQuery();
+            final BooleanQuery.Builder booleanQuery = new BooleanQuery.Builder();
             for (Query query : groupQuery) {
                 booleanQuery.add(query, BooleanClause.Occur.SHOULD);
             }
-            return booleanQuery;
+            return booleanQuery.build();
         }
     }

View File

@ -35,8 +35,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.XFilteredDocIdSetIterator;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
-import org.apache.lucene.util.Bits;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.LongBitSet;
 import org.elasticsearch.common.lucene.IndexCacheableQuery;
 import org.elasticsearch.common.lucene.Lucene;
@ -61,9 +60,9 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
     private final String childType;
     private final Filter parentFilter;
     private final int shortCircuitParentDocSet;
-    private final BitDocIdSetFilter nonNestedDocsFilter;
+    private final BitSetProducer nonNestedDocsFilter;

-    public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
+    public ChildrenConstantScoreQuery(IndexParentChildFieldData parentChildIndexFieldData, Query childQuery, String parentType, String childType, Filter parentFilter, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) {
         this.parentChildIndexFieldData = parentChildIndexFieldData;
         this.parentFilter = parentFilter;
         this.parentType = parentType;
@ -92,7 +91,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
         final long valueCount;
         List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
         if (globalIfd == null || leaves.isEmpty()) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         } else {
             AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0));
             SortedDocValues globalValues = afd.getOrdinalsValues(parentType);
@ -100,7 +99,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
         }

         if (valueCount == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }

         ParentOrdCollector collector = new ParentOrdCollector(globalIfd, valueCount, parentType);
@ -108,7 +107,7 @@ public class ChildrenConstantScoreQuery extends IndexCacheableQuery {
         final long remaining = collector.foundParents();
         if (remaining == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }

         Filter shortCircuitFilter = null;
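`new BooleanQuery.Builder().build()` replaces `new BooleanQuery()` as the conventional match-nothing placeholder whose Weight these parent/child queries hand back when there is nothing to score. Pulled out as a standalone helper for clarity:

    import java.io.IOException;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Weight;

    class MatchNothingWeight {
        // An empty boolean query matches no documents; its Weight is a cheap stand-in.
        static Weight noHits(IndexSearcher searcher, boolean needsScores) throws IOException {
            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
        }
    }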

View File

@ -34,8 +34,7 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.Weight;
 import org.apache.lucene.search.XFilteredDocIdSetIterator;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
-import org.apache.lucene.util.Bits;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.ToStringUtils;
 import org.elasticsearch.common.lease.Releasable;
 import org.elasticsearch.common.lease.Releasables;
@ -74,9 +73,9 @@ public final class ChildrenQuery extends IndexCacheableQuery {
     protected final int minChildren;
     protected final int maxChildren;
     protected final int shortCircuitParentDocSet;
-    protected final BitDocIdSetFilter nonNestedDocsFilter;
+    protected final BitSetProducer nonNestedDocsFilter;

-    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitDocIdSetFilter nonNestedDocsFilter) {
+    public ChildrenQuery(ParentChildIndexFieldData ifd, String parentType, String childType, Filter parentFilter, Query childQuery, ScoreType scoreType, int minChildren, int maxChildren, int shortCircuitParentDocSet, BitSetProducer nonNestedDocsFilter) {
         this.ifd = ifd;
         this.parentType = parentType;
         this.childType = childType;
@ -150,7 +149,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
         IndexParentChildFieldData globalIfd = ifd.loadGlobal(searcher.getIndexReader());
         if (globalIfd == null) {
             // No docs of the specified type exist on this shard
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }

         boolean abort = true;
@ -193,7 +192,7 @@ public final class ChildrenQuery extends IndexCacheableQuery {
             searcher.search(childQuery, collector);
             numFoundParents = collector.foundParents();
             if (numFoundParents == 0) {
-                return new BooleanQuery().createWeight(searcher, needsScores);
+                return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
             }
             abort = false;
         } finally {

View File

@ -81,7 +81,7 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
         final long maxOrd;
         List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
         if (globalIfd == null || leaves.isEmpty()) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         } else {
             AtomicParentChildFieldData afd = globalIfd.load(leaves.get(0));
             SortedDocValues globalValues = afd.getOrdinalsValues(parentType);
@ -89,14 +89,14 @@ public class ParentConstantScoreQuery extends IndexCacheableQuery {
         }

         if (maxOrd == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }

         ParentOrdsCollector collector = new ParentOrdsCollector(globalIfd, maxOrd, parentType);
         searcher.search(parentQuery, collector);

         if (collector.parentCount() == 0) {
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }

         return new ChildrenWeight(this, childrenFilter, collector, globalIfd);

View File

@ -21,17 +21,12 @@ package org.elasticsearch.index.search.child;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.PostingsEnum;
 import org.apache.lucene.index.SortedDocValues;
-import org.apache.lucene.index.Term;
 import org.apache.lucene.index.Terms;
 import org.apache.lucene.index.TermsEnum;
-import org.apache.lucene.search.BooleanClause.Occur;
-import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.QueryWrapperFilter;
-import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
 import org.apache.lucene.util.Bits;
@ -57,75 +52,51 @@ import java.io.IOException;
  */
 final class ParentIdsFilter extends Filter {

-    static Filter createShortCircuitFilter(BitDocIdSetFilter nonNestedDocsFilter, SearchContext searchContext,
+    static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext,
                                            String parentType, SortedDocValues globalValues,
                                            LongBitSet parentOrds, long numFoundParents) {
-        if (numFoundParents == 1) {
-            BytesRef id = globalValues.lookupOrd((int) parentOrds.nextSetBit(0));
-            if (nonNestedDocsFilter != null) {
-                BooleanQuery bq = new BooleanQuery();
-                bq.add(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))), Occur.MUST);
-                bq.add(nonNestedDocsFilter, Occur.MUST);
-                return new QueryWrapperFilter(bq);
-            } else {
-                return new QueryWrapperFilter(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))));
-            }
-        } else {
-            BytesRefHash parentIds= null;
-            boolean constructed = false;
-            try {
-                parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
-                for (long parentOrd = parentOrds.nextSetBit(0); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) {
-                    parentIds.add(globalValues.lookupOrd((int) parentOrd));
-                }
-                constructed = true;
-            } finally {
-                if (!constructed) {
-                    Releasables.close(parentIds);
-                }
-            }
-            searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
-            return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
+        BytesRefHash parentIds = null;
+        boolean constructed = false;
+        try {
+            parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
+            for (long parentOrd = parentOrds.nextSetBit(0); parentOrd != -1; parentOrd = parentOrds.nextSetBit(parentOrd + 1)) {
+                parentIds.add(globalValues.lookupOrd((int) parentOrd));
+            }
+            constructed = true;
+        } finally {
+            if (!constructed) {
+                Releasables.close(parentIds);
+            }
         }
+        searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
+        return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
     }

-    static Filter createShortCircuitFilter(BitDocIdSetFilter nonNestedDocsFilter, SearchContext searchContext,
+    static Filter createShortCircuitFilter(BitSetProducer nonNestedDocsFilter, SearchContext searchContext,
                                            String parentType, SortedDocValues globalValues,
                                            LongHash parentIdxs, long numFoundParents) {
-        if (numFoundParents == 1) {
-            BytesRef id = globalValues.lookupOrd((int) parentIdxs.get(0));
-            if (nonNestedDocsFilter != null) {
-                BooleanQuery bq = new BooleanQuery();
-                bq.add(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))), Occur.MUST);
-                bq.add(nonNestedDocsFilter, Occur.MUST);
-                return new QueryWrapperFilter(bq);
-            } else {
-                return new QueryWrapperFilter(new TermQuery(new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(parentType, id))));
-            }
-        } else {
-            BytesRefHash parentIds = null;
-            boolean constructed = false;
-            try {
-                parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
-                for (int id = 0; id < parentIdxs.size(); id++) {
-                    parentIds.add(globalValues.lookupOrd((int) parentIdxs.get(id)));
-                }
-                constructed = true;
-            } finally {
-                if (!constructed) {
-                    Releasables.close(parentIds);
-                }
-            }
-            searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
-            return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
+        BytesRefHash parentIds = null;
+        boolean constructed = false;
+        try {
+            parentIds = new BytesRefHash(numFoundParents, searchContext.bigArrays());
+            for (int id = 0; id < parentIdxs.size(); id++) {
+                parentIds.add(globalValues.lookupOrd((int) parentIdxs.get(id)));
+            }
+            constructed = true;
+        } finally {
+            if (!constructed) {
+                Releasables.close(parentIds);
+            }
         }
+        searchContext.addReleasable(parentIds, SearchContext.Lifetime.COLLECTION);
+        return new ParentIdsFilter(parentType, nonNestedDocsFilter, parentIds);
     }

     private final BytesRef parentTypeBr;
-    private final BitDocIdSetFilter nonNestedDocsFilter;
+    private final BitSetProducer nonNestedDocsFilter;
     private final BytesRefHash parentIds;

-    private ParentIdsFilter(String parentType, BitDocIdSetFilter nonNestedDocsFilter, BytesRefHash parentIds) {
+    private ParentIdsFilter(String parentType, BitSetProducer nonNestedDocsFilter, BytesRefHash parentIds) {
         this.nonNestedDocsFilter = nonNestedDocsFilter;
         this.parentTypeBr = new BytesRef(parentType);
         this.parentIds = parentIds;
@ -148,7 +119,7 @@ final class ParentIdsFilter extends Filter {
         BitSet nonNestedDocs = null;
         if (nonNestedDocsFilter != null) {
-            nonNestedDocs = nonNestedDocsFilter.getDocIdSet(context).bits();
+            nonNestedDocs = nonNestedDocsFilter.getBitSet(context);
         }

         PostingsEnum docsEnum = null;
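With the single-parent shortcut gone, both factory methods share one shape: fill a BytesRefHash, and use a constructed flag so a failure mid-fill releases the partially-built structure. That pattern in isolation (types as used above; simplified signature):

    import org.apache.lucene.util.BytesRef;
    import org.elasticsearch.common.lease.Releasables;
    import org.elasticsearch.common.util.BigArrays;
    import org.elasticsearch.common.util.BytesRefHash;

    class FillHashExample {
        static BytesRefHash fill(Iterable<BytesRef> ids, BigArrays bigArrays, long expectedSize) {
            BytesRefHash hash = null;
            boolean constructed = false;
            try {
                hash = new BytesRefHash(expectedSize, bigArrays);
                for (BytesRef id : ids) {
                    hash.add(id);
                }
                constructed = true; // from here on the caller owns (and must release) the hash
                return hash;
            } finally {
                if (!constructed) {
                    Releasables.close(hash); // release the partially-built structure on failure
                }
            }
        }
    }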

View File

@ -125,14 +125,14 @@ public class ParentQuery extends IndexCacheableQuery {
         IndexParentChildFieldData globalIfd = parentChildIndexFieldData.loadGlobal(searcher.getIndexReader());
         if (globalIfd == null) {
             // No docs of the specified type exist on this shard
-            return new BooleanQuery().createWeight(searcher, needsScores);
+            return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
         }

         try {
             collector = new ParentOrdAndScoreCollector(sc, globalIfd, parentType);
             searcher.search(parentQuery, collector);
             if (collector.parentCount() == 0) {
-                return new BooleanQuery().createWeight(searcher, needsScores);
+                return new BooleanQuery.Builder().build().createWeight(searcher, needsScores);
             }

             childWeight = new ChildWeight(this, parentQuery.createWeight(searcher, needsScores), childrenFilter, collector, globalIfd);
             releaseCollectorResource = false;

View File

@ -43,18 +43,18 @@ public class IndexedGeoBoundingBoxQuery {
     }

     private static Query westGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
-        BooleanQuery filter = new BooleanQuery();
+        BooleanQuery.Builder filter = new BooleanQuery.Builder();
         filter.setMinimumNumberShouldMatch(1);
         filter.add(fieldType.lonFieldType().rangeQuery(null, bottomRight.lon(), true, true), Occur.SHOULD);
         filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), null, true, true), Occur.SHOULD);
         filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
-        return new ConstantScoreQuery(filter);
+        return new ConstantScoreQuery(filter.build());
     }

     private static Query eastGeoBoundingBoxFilter(GeoPoint topLeft, GeoPoint bottomRight, GeoPointFieldMapper.GeoPointFieldType fieldType) {
-        BooleanQuery filter = new BooleanQuery();
+        BooleanQuery.Builder filter = new BooleanQuery.Builder();
         filter.add(fieldType.lonFieldType().rangeQuery(topLeft.lon(), bottomRight.lon(), true, true), Occur.MUST);
         filter.add(fieldType.latFieldType().rangeQuery(bottomRight.lat(), topLeft.lat(), true, true), Occur.MUST);
-        return new ConstantScoreQuery(filter);
+        return new ConstantScoreQuery(filter.build());
     }
 }
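The west/east split exists because a box that crosses the dateline cannot be a single longitude range: the "west" variant ORs two half-ranges with setMinimumNumberShouldMatch(1) while latitude stays a MUST. Spelled out with plain numeric range queries (field names and bounds invented for illustration):

    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.ConstantScoreQuery;
    import org.apache.lucene.search.NumericRangeQuery;
    import org.apache.lucene.search.Query;

    class DatelineBoxExample {
        // Box crossing the dateline: left lon 170, right lon -170, lat -10..10.
        static Query crossingDatelineBox() {
            BooleanQuery.Builder box = new BooleanQuery.Builder();
            box.setMinimumNumberShouldMatch(1); // at least one longitude half must match
            box.add(NumericRangeQuery.newDoubleRange("lon", null, -170.0, true, true), BooleanClause.Occur.SHOULD);
            box.add(NumericRangeQuery.newDoubleRange("lon", 170.0, null, true, true), BooleanClause.Occur.SHOULD);
            box.add(NumericRangeQuery.newDoubleRange("lat", -10.0, 10.0, true, true), BooleanClause.Occur.MUST);
            return new ConstantScoreQuery(box.build());
        }
    }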

View File

@@ -19,15 +19,17 @@
 package org.elasticsearch.index.search.nested;

-import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.*;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Explanation;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
-import org.apache.lucene.util.Bits;
-import org.apache.lucene.util.BitDocIdSet;
-import org.apache.lucene.util.BytesRef;

 import java.io.IOException;
 import java.util.Collection;
@@ -41,7 +43,7 @@ import java.util.Set;
  */
 public class IncludeNestedDocsQuery extends Query {

-    private final BitDocIdSetFilter parentFilter;
+    private final BitSetProducer parentFilter;
     private final Query parentQuery;

     // If we are rewritten, this is the original childQuery we
@@ -52,7 +54,7 @@ public class IncludeNestedDocsQuery extends Query {
     private final Query origParentQuery;

-    public IncludeNestedDocsQuery(Query parentQuery, BitDocIdSetFilter parentFilter) {
+    public IncludeNestedDocsQuery(Query parentQuery, BitSetProducer parentFilter) {
         this.origParentQuery = parentQuery;
         this.parentQuery = parentQuery;
         this.parentFilter = parentFilter;
@@ -82,9 +84,9 @@ public class IncludeNestedDocsQuery extends Query {
         private final Query parentQuery;
         private final Weight parentWeight;
-        private final BitDocIdSetFilter parentsFilter;
+        private final BitSetProducer parentsFilter;

-        IncludeNestedDocsWeight(Query query, Query parentQuery, Weight parentWeight, BitDocIdSetFilter parentsFilter) {
+        IncludeNestedDocsWeight(Query query, Query parentQuery, Weight parentWeight, BitSetProducer parentsFilter) {
             super(query);
             this.parentQuery = parentQuery;
             this.parentWeight = parentWeight;
@@ -115,7 +117,7 @@ public class IncludeNestedDocsQuery extends Query {
                 return null;
             }

-            BitDocIdSet parents = parentsFilter.getDocIdSet(context);
+            BitSet parents = parentsFilter.getBitSet(context);
             if (parents == null) {
                 // No matches
                 return null;
@@ -144,10 +146,10 @@ public class IncludeNestedDocsQuery extends Query {
         int currentParentPointer = -1;
         int currentDoc = -1;

-        IncludeNestedDocsScorer(Weight weight, Scorer parentScorer, BitDocIdSet parentBits, int currentParentPointer) {
+        IncludeNestedDocsScorer(Weight weight, Scorer parentScorer, BitSet parentBits, int currentParentPointer) {
             super(weight);
             this.parentScorer = parentScorer;
-            this.parentBits = parentBits.bits();
+            this.parentBits = parentBits;
             this.currentParentPointer = currentParentPointer;
             if (currentParentPointer == 0) {
                 currentChildPointer = 0;
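The BitDocIdSetFilter to BitSetProducer switch is the other recurring Lucene 5.3 change in this merge: per-segment parent bits now come back as a plain BitSet, with null meaning the segment holds no parent documents. A hedged sketch of the consumer side (the helper name is invented), mirroring how the fetch and nested code later in this diff walks the bits:

import java.io.IOException;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.util.BitSet;

class ParentBitsSketch {
    // Block-join convention: a parent doc immediately follows its children,
    // so the enclosing parent of any child doc is the next set bit.
    static int parentOf(BitSetProducer parentsFilter, LeafReaderContext ctx, int doc) throws IOException {
        BitSet parents = parentsFilter.getBitSet(ctx); // replaces getDocIdSet(ctx).bits()
        if (parents == null || parents.get(doc)) {
            return doc; // no parents in this segment, or doc is itself a parent
        }
        return parents.nextSetBit(doc);
    }
}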


@@ -254,42 +254,7 @@ public class IndexShard extends AbstractIndexShardComponent {
         if (indexSettings.getAsBoolean(IndexCacheModule.QUERY_CACHE_EVERYTHING, false)) {
             cachingPolicy = QueryCachingPolicy.ALWAYS_CACHE;
         } else {
-            assert Version.CURRENT.luceneVersion == org.apache.lucene.util.Version.LUCENE_5_3_0;
-            // TODO: remove this hack in Lucene 5.4, use UsageTrackingQueryCachingPolicy directly
-            // See https://issues.apache.org/jira/browse/LUCENE-6748
-            // cachingPolicy = new UsageTrackingQueryCachingPolicy();
-            final QueryCachingPolicy wrapped = new UsageTrackingQueryCachingPolicy();
-            cachingPolicy = new QueryCachingPolicy() {
-                @Override
-                public boolean shouldCache(Query query, LeafReaderContext context) throws IOException {
-                    if (query instanceof MatchAllDocsQuery
-                            // MatchNoDocsQuery currently rewrites to a BooleanQuery,
-                            // but who knows, it might get its own Weight one day
-                            || query instanceof MatchNoDocsQuery) {
-                        return false;
-                    }
-                    if (query instanceof BooleanQuery) {
-                        BooleanQuery bq = (BooleanQuery) query;
-                        if (bq.clauses().isEmpty()) {
-                            return false;
-                        }
-                    }
-                    if (query instanceof DisjunctionMaxQuery) {
-                        DisjunctionMaxQuery dmq = (DisjunctionMaxQuery) query;
-                        if (dmq.getDisjuncts().isEmpty()) {
-                            return false;
-                        }
-                    }
-                    return wrapped.shouldCache(query, context);
-                }
-                @Override
-                public void onUse(Query query) {
-                    wrapped.onUse(query);
-                }
-            };
+            cachingPolicy = new UsageTrackingQueryCachingPolicy();
         }
         this.engineConfig = newEngineConfig(translogConfig, cachingPolicy);
         this.indexShardOperationCounter = new IndexShardOperationCounter(logger, shardId);
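With LUCENE-6748 resolved, the shard drops its hand-rolled wrapper and relies on UsageTrackingQueryCachingPolicy itself to decline caching trivial queries. A short sketch of wiring such a policy, assuming a plain Lucene IndexSearcher rather than the shard's engine plumbing:

import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.QueryCachingPolicy;
import org.apache.lucene.search.UsageTrackingQueryCachingPolicy;

class CachingPolicySketch {
    static void configure(IndexSearcher searcher) {
        // Tracks recently used queries and only caches those seen often enough.
        QueryCachingPolicy policy = new UsageTrackingQueryCachingPolicy();
        searcher.setQueryCachingPolicy(policy);
    }
}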


@@ -19,7 +19,7 @@
 package org.elasticsearch.index.shard;

 import org.apache.lucene.search.Query;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.Nullable;
@@ -34,7 +34,12 @@ import org.elasticsearch.index.aliases.IndexAliasesService;
 import org.elasticsearch.index.cache.IndexCache;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.engine.IgnoreOnRecoveryEngineException;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapperForType;
+import org.elasticsearch.index.mapper.MapperException;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.MapperUtils;
+import org.elasticsearch.index.mapper.Mapping;
+import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.query.IndexQueryParserService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryParsingException;
@@ -222,7 +227,7 @@ public class TranslogRecoveryPerformer {
         }
         Query aliasFilter = indexAliasesService.aliasFilter(filteringAliases);
-        BitDocIdSetFilter parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()) : null;
+        BitSetProducer parentFilter = mapperService.hasNested() ? indexCache.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()) : null;
         return new Engine.DeleteByQuery(query, source, filteringAliases, aliasFilter, parentFilter, origin, startTime, types);
     }


@@ -26,6 +26,7 @@ import org.apache.lucene.analysis.cjk.CJKBigramFilter;
 import org.apache.lucene.analysis.cjk.CJKWidthFilter;
 import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter;
 import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
+import org.apache.lucene.analysis.core.DecimalDigitFilter;
 import org.apache.lucene.analysis.core.LowerCaseFilter;
 import org.apache.lucene.analysis.core.Lucene43StopFilter;
 import org.apache.lucene.analysis.core.StopAnalyzer;
@@ -396,6 +397,13 @@ public enum PreBuiltTokenFilters {
         }
     },

+    DECIMAL_DIGIT(CachingStrategy.ONE) {
+        @Override
+        public TokenStream create(TokenStream tokenStream, Version version) {
+            return new DecimalDigitFilter(tokenStream);
+        }
+    },
+
     CJK_BIGRAM(CachingStrategy.ONE) {
         @Override
         public TokenStream create(TokenStream tokenStream, Version version) {


@@ -805,10 +805,10 @@ public class PercolatorService extends AbstractComponent {
         final Query filter;
         if (context.aliasFilter() != null) {
-            BooleanQuery booleanFilter = new BooleanQuery();
+            BooleanQuery.Builder booleanFilter = new BooleanQuery.Builder();
             booleanFilter.add(context.aliasFilter(), BooleanClause.Occur.MUST);
             booleanFilter.add(percolatorTypeFilter, BooleanClause.Occur.MUST);
-            filter = booleanFilter;
+            filter = booleanFilter.build();
         } else {
             filter = percolatorTypeFilter;
         }


@@ -19,7 +19,6 @@
 package org.elasticsearch.search;

-import org.elasticsearch.common.Classes;
 import org.elasticsearch.common.inject.AbstractModule;
 import org.elasticsearch.common.inject.multibindings.Multibinder;
 import org.elasticsearch.common.settings.Settings;
@@ -110,6 +109,8 @@ import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucke
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketPipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketParser;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketPipelineAggregator;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketParser;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketPipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketParser;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketPipelineAggregator;
 import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptParser;
@@ -143,8 +144,6 @@ import org.elasticsearch.search.highlight.HighlightPhase;
 import org.elasticsearch.search.highlight.Highlighter;
 import org.elasticsearch.search.highlight.Highlighters;
 import org.elasticsearch.search.query.QueryPhase;
-import org.elasticsearch.search.suggest.SuggestParseElement;
-import org.elasticsearch.search.suggest.SuggestPhase;
 import org.elasticsearch.search.suggest.Suggester;
 import org.elasticsearch.search.suggest.Suggesters;
@@ -301,6 +300,7 @@ public class SearchModule extends AbstractModule {
         multibinderPipelineAggParser.addBinding().to(MinBucketParser.class);
         multibinderPipelineAggParser.addBinding().to(AvgBucketParser.class);
         multibinderPipelineAggParser.addBinding().to(SumBucketParser.class);
+        multibinderPipelineAggParser.addBinding().to(PercentilesBucketParser.class);
         multibinderPipelineAggParser.addBinding().to(MovAvgParser.class);
         multibinderPipelineAggParser.addBinding().to(CumulativeSumParser.class);
         multibinderPipelineAggParser.addBinding().to(BucketScriptParser.class);
@@ -393,6 +393,7 @@ public class SearchModule extends AbstractModule {
         MinBucketPipelineAggregator.registerStreams();
         AvgBucketPipelineAggregator.registerStreams();
         SumBucketPipelineAggregator.registerStreams();
+        PercentilesBucketPipelineAggregator.registerStreams();
         MovAvgPipelineAggregator.registerStreams();
         CumulativeSumPipelineAggregator.registerStreams();
         BucketScriptPipelineAggregator.registerStreams();


@@ -119,9 +119,10 @@ public class AggregationPhase implements SearchPhase {
         Query query = Queries.newMatchAllQuery();
         Query searchFilter = context.searchFilter(context.types());
         if (searchFilter != null) {
-            BooleanQuery filtered = new BooleanQuery();
-            filtered.add(query, Occur.MUST);
-            filtered.add(searchFilter, Occur.FILTER);
+            BooleanQuery filtered = new BooleanQuery.Builder()
+                .add(query, Occur.MUST)
+                .add(searchFilter, Occur.FILTER)
+                .build();
             query = filtered;
         }
         try {


@@ -18,16 +18,16 @@
  */
 package org.elasticsearch.search.aggregations.bucket.nested;

+import org.apache.lucene.index.IndexReaderContext;
 import org.apache.lucene.index.LeafReaderContext;
-import org.apache.lucene.search.DocIdSet;
+import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
-import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
-import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
 import org.elasticsearch.search.aggregations.Aggregator;
@@ -50,8 +50,8 @@ import java.util.Map;
  */
 public class NestedAggregator extends SingleBucketAggregator {

-    private BitDocIdSetFilter parentFilter;
-    private final Filter childFilter;
+    private BitSetProducer parentFilter;
+    private final Query childFilter;

     private DocIdSetIterator childDocs;
     private BitSet parentDocs;
@@ -65,13 +65,11 @@ public class NestedAggregator extends SingleBucketAggregator {
     public LeafBucketCollector getLeafCollector(final LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
         // Reset parentFilter, so we resolve the parentDocs for each new segment being searched
         this.parentFilter = null;
-        // In ES if parent is deleted, then also the children are deleted. Therefore acceptedDocs can also null here.
-        DocIdSet childDocIdSet = childFilter.getDocIdSet(ctx, null);
-        if (Lucene.isEmpty(childDocIdSet)) {
-            childDocs = null;
-        } else {
-            childDocs = childDocIdSet.iterator();
-        }
+        final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(ctx);
+        final IndexSearcher searcher = new IndexSearcher(topLevelContext);
+        searcher.setQueryCache(null);
+        final Weight weight = searcher.createNormalizedWeight(childFilter, false);
+        childDocs = weight.scorer(ctx);

         return new LeafBucketCollectorBase(sub, null) {
             @Override
@@ -91,18 +89,16 @@ public class NestedAggregator extends SingleBucketAggregator {
                 // Additional NOTE: Before this logic was performed in the setNextReader(...) method, but the the assumption
                 // that aggs instances are constructed in reverse doesn't hold when buckets are constructed lazily during
                 // aggs execution
-                Filter parentFilterNotCached = findClosestNestedPath(parent());
+                Query parentFilterNotCached = findClosestNestedPath(parent());
                 if (parentFilterNotCached == null) {
                     parentFilterNotCached = Queries.newNonNestedFilter();
                 }
-                parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(parentFilterNotCached);
-                BitDocIdSet parentSet = parentFilter.getDocIdSet(ctx);
-                if (Lucene.isEmpty(parentSet)) {
+                parentFilter = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilterNotCached);
+                parentDocs = parentFilter.getBitSet(ctx);
+                if (parentDocs == null) {
                     // There are no parentDocs in the segment, so return and set childDocs to null, so we exit early for future invocations.
                     childDocs = null;
                     return;
-                } else {
-                    parentDocs = parentSet.bits();
                 }
             }
@@ -130,7 +126,7 @@ public class NestedAggregator extends SingleBucketAggregator {
         return new InternalNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
     }

-    private static Filter findClosestNestedPath(Aggregator parent) {
+    private static Query findClosestNestedPath(Aggregator parent) {
         for (; parent != null; parent = parent.parent()) {
             if (parent instanceof NestedAggregator) {
                 return ((NestedAggregator) parent).childFilter;
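Instead of pulling a DocIdSet from a Filter, the aggregator now materializes the child query through a throwaway top-level IndexSearcher with the query cache disabled. Condensed into a standalone sketch (the helper name is invented; in Lucene 5.x a Scorer is itself a DocIdSetIterator):

import java.io.IOException;

import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Weight;

class QueryToIteratorSketch {
    static DocIdSetIterator iterator(Query query, LeafReaderContext ctx) throws IOException {
        IndexReaderContext topLevel = ReaderUtil.getTopLevelContext(ctx);
        IndexSearcher searcher = new IndexSearcher(topLevel);
        searcher.setQueryCache(null); // avoid caching on behalf of a single aggregation pass
        Weight weight = searcher.createNormalizedWeight(query, false); // false: scores not needed
        return weight.scorer(ctx); // null when nothing in this segment matches
    }
}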


@@ -22,13 +22,10 @@ import com.carrotsearch.hppc.LongIntHashMap;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSetIterator;
-import org.apache.lucene.search.Filter;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
-import org.apache.lucene.util.BitDocIdSet;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
-import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
-import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.index.mapper.object.ObjectMapper;
 import org.elasticsearch.search.SearchParseException;
 import org.elasticsearch.search.aggregations.AggregationExecutionException;
@@ -52,30 +49,28 @@ import java.util.Map;
  */
 public class ReverseNestedAggregator extends SingleBucketAggregator {

-    private final BitDocIdSetFilter parentFilter;
+    private final Query parentFilter;
+    private final BitSetProducer parentBitsetProducer;

     public ReverseNestedAggregator(String name, AggregatorFactories factories, ObjectMapper objectMapper,
             AggregationContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
             throws IOException {
         super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
         if (objectMapper == null) {
-            parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
+            parentFilter = Queries.newNonNestedFilter();
         } else {
-            parentFilter = context.searchContext().bitsetFilterCache().getBitDocIdSetFilter(objectMapper.nestedTypeFilter());
+            parentFilter = objectMapper.nestedTypeFilter();
         }
+        parentBitsetProducer = context.searchContext().bitsetFilterCache().getBitSetProducer(parentFilter);
     }

     @Override
     protected LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException {
         // In ES if parent is deleted, then also the children are deleted, so the child docs this agg receives
         // must belong to parent docs that is alive. For this reason acceptedDocs can be null here.
-        BitDocIdSet docIdSet = parentFilter.getDocIdSet(ctx);
-        final BitSet parentDocs;
-        if (Lucene.isEmpty(docIdSet)) {
+        final BitSet parentDocs = parentBitsetProducer.getBitSet(ctx);
+        if (parentDocs == null) {
             return LeafBucketCollector.NO_OP_COLLECTOR;
-        } else {
-            parentDocs = docIdSet.bits();
         }
         final LongIntHashMap bucketOrdToLastCollectedParentDoc = new LongIntHashMap(32);
         return new LeafBucketCollectorBase(sub, null) {
@@ -120,7 +115,7 @@ public class ReverseNestedAggregator extends SingleBucketAggregator {
         return new InternalReverseNested(name, 0, buildEmptySubAggregations(), pipelineAggregators(), metaData());
     }

-    Filter getParentFilter() {
+    Query getParentFilter() {
         return parentFilter;
     }


@@ -22,6 +22,7 @@ package org.elasticsearch.search.aggregations.pipeline;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.avg.AvgBucketBuilder;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.max.MaxBucketBuilder;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.min.MinBucketBuilder;
+import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.sum.SumBucketBuilder;
 import org.elasticsearch.search.aggregations.pipeline.cumulativesum.CumulativeSumBuilder;
 import org.elasticsearch.search.aggregations.pipeline.bucketscript.BucketScriptBuilder;
@@ -55,6 +56,10 @@ public final class PipelineAggregatorBuilders {
         return new SumBucketBuilder(name);
     }

+    public static final PercentilesBucketBuilder percentilesBucket(String name) {
+        return new PercentilesBucketBuilder(name);
+    }
+
     public static final MovAvgBuilder movingAvg(String name) {
         return new MovAvgBuilder(name);
     }
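Once registered here, the new pipeline aggregation is reachable from request-building code. A usage sketch, assuming the setBucketsPaths setter inherited from the pipeline builder base class; the aggregation name and buckets_path are invented for illustration:

import static org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders.percentilesBucket;

import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile.PercentilesBucketBuilder;

class PercentilesBucketUsageSketch {
    static PercentilesBucketBuilder monthlySalesPercentiles() {
        return percentilesBucket("pct_monthly_sales")
                .setBucketsPaths("sales_per_month>sales")
                .percents(new Double[] { 25.0, 50.0, 75.0 });
    }
}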


@@ -61,7 +61,7 @@ public abstract class BucketMetricsBuilder<B extends BucketMetricsBuilder<B>> ex
         return builder;
     }

-    protected void doInternalXContent(XContentBuilder builder, Params params) {
+    protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException {
     }
 }


@@ -31,8 +31,11 @@ import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
 import org.elasticsearch.search.internal.SearchContext;

 import java.io.IOException;
+import java.text.ParseException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;

 /**
  * A parser for parsing requests for a {@link BucketMetricsPipelineAggregator}
@@ -52,12 +55,11 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
         String[] bucketsPaths = null;
         String format = null;
         GapPolicy gapPolicy = GapPolicy.SKIP;
+        Map<String, Object> leftover = new HashMap<>(5);

         while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
             if (token == XContentParser.Token.FIELD_NAME) {
                 currentFieldName = parser.currentName();
-            } else if (doParse(pipelineAggregatorName, currentFieldName, token, parser, context)) {
-                // Do nothing as subclass has stored the state for this token
             } else if (token == XContentParser.Token.VALUE_STRING) {
                 if (context.parseFieldMatcher().match(currentFieldName, FORMAT)) {
                     format = parser.text();
@@ -66,8 +68,7 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
                 } else if (context.parseFieldMatcher().match(currentFieldName, GAP_POLICY)) {
                     gapPolicy = GapPolicy.parse(context, parser.text(), parser.getTokenLocation());
                 } else {
-                    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
-                            + currentFieldName + "].", parser.getTokenLocation());
+                    leftover.put(currentFieldName, parser.text());
                 }
             } else if (token == XContentParser.Token.START_ARRAY) {
                 if (context.parseFieldMatcher().match(currentFieldName, BUCKETS_PATH)) {
@@ -78,18 +79,16 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
                     }
                     bucketsPaths = paths.toArray(new String[paths.size()]);
                 } else {
-                    throw new SearchParseException(context, "Unknown key for a " + token + " in [" + pipelineAggregatorName + "]: ["
-                            + currentFieldName + "].", parser.getTokenLocation());
+                    leftover.put(currentFieldName, parser.list());
                 }
             } else {
-                throw new SearchParseException(context, "Unexpected token " + token + " in [" + pipelineAggregatorName + "].",
-                        parser.getTokenLocation());
+                leftover.put(currentFieldName, parser.objectText());
             }
         }

         if (bucketsPaths == null) {
             throw new SearchParseException(context, "Missing required field [" + BUCKETS_PATH.getPreferredName()
-                    + "] for derivative aggregation [" + pipelineAggregatorName + "]", parser.getTokenLocation());
+                    + "] for aggregation [" + pipelineAggregatorName + "]", parser.getTokenLocation());
         }

         ValueFormatter formatter = null;
@@ -99,14 +98,23 @@ public abstract class BucketMetricsParser implements PipelineAggregator.Parser {
             formatter = ValueFormatter.RAW;
         }

-        return buildFactory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter);
+        PipelineAggregatorFactory factory = null;
+        try {
+            factory = buildFactory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter, leftover);
+        } catch (ParseException exception) {
+            throw new SearchParseException(context, "Could not parse settings for aggregation ["
+                    + pipelineAggregatorName + "].", null, exception);
+        }
+
+        if (leftover.size() > 0) {
+            throw new SearchParseException(context, "Unexpected tokens " + leftover.keySet() + " in [" + pipelineAggregatorName + "].", null);
+        }
+        assert(factory != null);
+        return factory;
     }

     protected abstract PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy,
-            ValueFormatter formatter);
-
-    protected boolean doParse(String pipelineAggregatorName, String currentFieldName, Token token, XContentParser parser, SearchContext context) {
-        return false;
-    }
 }
+            ValueFormatter formatter, Map<String, Object> unparsedParams) throws ParseException;
+}
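The reworked contract: unknown fields land in the leftover map, buildFactory consumes the settings it understands (removing entries as it goes), and the base class turns anything still in the map into a SearchParseException. A self-contained toy version of that consume-or-fail flow, with an invented "window" setting:

import java.text.ParseException;
import java.util.HashMap;
import java.util.Map;

class LeftoverParamsSketch {
    // A subclass-style consumer: pull the settings you understand out of the map.
    static int consumeWindow(Map<String, Object> unparsedParams) throws ParseException {
        Object w = unparsedParams.remove("window"); // remove() marks the param as consumed
        if (w == null) {
            return 10; // default window
        }
        if (!(w instanceof Number)) {
            throw new ParseException("Parameter [window] must be a number", 0);
        }
        return ((Number) w).intValue();
    }

    public static void main(String[] args) throws ParseException {
        Map<String, Object> leftover = new HashMap<>();
        leftover.put("window", 25);
        leftover.put("wnidow_typo", true);
        System.out.println(consumeWindow(leftover)); // 25
        System.out.println(leftover.keySet());       // [wnidow_typo] -> the base class would reject it
    }
}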


@@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

+import java.util.Map;
+
 public class AvgBucketParser extends BucketMetricsParser {
     @Override
     public String type() {
@@ -32,7 +34,7 @@ public class AvgBucketParser extends BucketMetricsParser {
     @Override
     protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy,
-            ValueFormatter formatter) {
+            ValueFormatter formatter, Map<String, Object> unparsedParams) {
         return new AvgBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter);
     }
 }


@@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

+import java.util.Map;
+
 public class MaxBucketParser extends BucketMetricsParser {

     @Override
@@ -32,7 +34,8 @@ public class MaxBucketParser extends BucketMetricsParser {
     }

     @Override
-    protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter) {
+    protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy,
+            ValueFormatter formatter, Map<String, Object> unparsedParams) {
         return new MaxBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter);
     }


@@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

+import java.util.Map;
+
 public class MinBucketParser extends BucketMetricsParser {

     @Override
@@ -32,7 +34,7 @@ public class MinBucketParser extends BucketMetricsParser {
     }

     protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy,
-            ValueFormatter formatter) {
+            ValueFormatter formatter, Map<String, Object> unparsedParams) {
         return new MinBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter);
     };


@@ -0,0 +1,163 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import com.google.common.collect.UnmodifiableIterator;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.AggregationStreams;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.metrics.InternalNumericMetricsAggregation;
import org.elasticsearch.search.aggregations.metrics.max.InternalMax;
import org.elasticsearch.search.aggregations.metrics.percentiles.InternalPercentile;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentile;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import org.elasticsearch.search.aggregations.support.format.ValueFormatterStreams;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
public class InternalPercentilesBucket extends InternalNumericMetricsAggregation.MultiValue implements PercentilesBucket {
public final static Type TYPE = new Type("percentiles_bucket");
public final static AggregationStreams.Stream STREAM = new AggregationStreams.Stream() {
@Override
public InternalPercentilesBucket readResult(StreamInput in) throws IOException {
InternalPercentilesBucket result = new InternalPercentilesBucket();
result.readFrom(in);
return result;
}
};
public static void registerStreams() {
AggregationStreams.registerStream(STREAM, TYPE.stream());
}
private double[] percentiles;
private double[] percents;
protected InternalPercentilesBucket() {
} // for serialization
public InternalPercentilesBucket(String name, double[] percents, double[] percentiles,
ValueFormatter formatter, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) {
super(name, pipelineAggregators, metaData);
this.valueFormatter = formatter;
this.percentiles = percentiles;
this.percents = percents;
}
@Override
public double percentile(double percent) throws IllegalArgumentException {
int index = Arrays.binarySearch(percents, percent);
if (index < 0) {
throw new IllegalArgumentException("Percent requested [" + String.valueOf(percent) + "] was not" +
" one of the computed percentiles. Available keys are: " + Arrays.toString(percents));
}
return percentiles[index];
}
@Override
public String percentileAsString(double percent) {
return valueFormatter.format(percentile(percent));
}
@Override
public Iterator<Percentile> iterator() {
return new Iter(percents, percentiles);
}
@Override
public double value(String name) {
return percentile(Double.parseDouble(name));
}
@Override
public Type type() {
return TYPE;
}
@Override
public InternalMax doReduce(List<InternalAggregation> aggregations, ReduceContext reduceContext) {
throw new UnsupportedOperationException("Not supported");
}
@Override
protected void doReadFrom(StreamInput in) throws IOException {
valueFormatter = ValueFormatterStreams.readOptional(in);
percentiles = in.readDoubleArray();
percents = in.readDoubleArray();
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
ValueFormatterStreams.writeOptional(valueFormatter, out);
out.writeDoubleArray(percentiles);
out.writeDoubleArray(percents);
}
@Override
public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.startObject("values");
for (double percent : percents) {
double value = percentile(percent);
boolean hasValue = !(Double.isInfinite(value) || Double.isNaN(value));
String key = String.valueOf(percent);
builder.field(key, hasValue ? value : null);
if (hasValue && !(valueFormatter instanceof ValueFormatter.Raw)) {
builder.field(key + "_as_string", percentileAsString(percent));
}
}
builder.endObject();
return builder;
}
public static class Iter extends UnmodifiableIterator<Percentile> {
private final double[] percents;
private final double[] percentiles;
private int i;
public Iter(double[] percents, double[] percentiles) {
this.percents = percents;
this.percentiles = percentiles;
i = 0;
}
@Override
public boolean hasNext() {
return i < percents.length;
}
@Override
public Percentile next() {
final Percentile next = new InternalPercentile(percents[i], percentiles[i]);
++i;
return next;
}
}
}
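Note that percentile(double) looks the requested percent up with Arrays.binarySearch over the stored percents array, so callers must ask for exactly the doubles that were computed (and the array must be kept sorted). A quick illustration with invented numbers:

import java.util.Arrays;

class PercentileLookupSketch {
    public static void main(String[] args) {
        double[] percents    = { 25.0, 50.0, 75.0 };
        double[] percentiles = { 12.0, 30.0, 57.5 }; // invented sample values

        int index = Arrays.binarySearch(percents, 50.0); // same lookup as percentile(double)
        System.out.println(percentiles[index]);          // 30.0

        // A percent that was never requested gives a negative index,
        // which the class above converts into an IllegalArgumentException.
        System.out.println(Arrays.binarySearch(percents, 51.0)); // < 0
    }
}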


@@ -0,0 +1,25 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import org.elasticsearch.search.aggregations.metrics.percentiles.Percentiles;
public interface PercentilesBucket extends Percentiles {
}


@@ -0,0 +1,49 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsBuilder;
import java.io.IOException;
public class PercentilesBucketBuilder extends BucketMetricsBuilder<PercentilesBucketBuilder> {
Double[] percents;
public PercentilesBucketBuilder(String name) {
super(name, PercentilesBucketPipelineAggregator.TYPE.name());
}
public PercentilesBucketBuilder percents(Double[] percents) {
this.percents = percents;
return this;
}
@Override
protected void doInternalXContent(XContentBuilder builder, Params params) throws IOException {
if (percents != null) {
builder.field(PercentilesBucketParser.PERCENTS.getPreferredName(), percents);
}
}
}


@@ -0,0 +1,72 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.text.ParseException;
import java.util.List;
import java.util.Map;
import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
public class PercentilesBucketParser extends BucketMetricsParser {
public static final ParseField PERCENTS = new ParseField("percents");
@Override
public String type() {
return PercentilesBucketPipelineAggregator.TYPE.name();
}
@Override
protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy,
ValueFormatter formatter, Map<String, Object> unparsedParams) throws ParseException {
double[] percents = new double[] { 1.0, 5.0, 25.0, 50.0, 75.0, 95.0, 99.0 };
int counter = 0;
Object percentParam = unparsedParams.get(PERCENTS.getPreferredName());
if (percentParam != null) {
if (percentParam instanceof List) {
percents = new double[((List) percentParam).size()];
for (Object p : (List) percentParam) {
if (p instanceof Double) {
percents[counter] = (Double) p;
counter += 1;
} else {
throw new ParseException("Parameter [" + PERCENTS.getPreferredName() + "] must be an array of doubles, type `"
+ percentParam.getClass().getSimpleName() + "` provided instead", 0);
}
}
unparsedParams.remove(PERCENTS.getPreferredName());
} else {
throw new ParseException("Parameter [" + PERCENTS.getPreferredName() + "] must be an array of doubles, type `"
+ percentParam.getClass().getSimpleName() + "` provided instead", 0);
}
}
return new PercentilesBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter, percents);
}
}


@@ -0,0 +1,155 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.pipeline.bucketmetrics.percentile;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.InternalAggregation;
import org.elasticsearch.search.aggregations.InternalAggregation.Type;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorStreams;
import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsPipelineAggregator;
import org.elasticsearch.search.aggregations.support.format.ValueFormatter;
import java.io.IOException;
import java.util.*;
import static org.elasticsearch.search.aggregations.pipeline.BucketHelpers.GapPolicy;
public class PercentilesBucketPipelineAggregator extends BucketMetricsPipelineAggregator {
public final static Type TYPE = new Type("percentiles_bucket");
public final static PipelineAggregatorStreams.Stream STREAM = new PipelineAggregatorStreams.Stream() {
@Override
public PercentilesBucketPipelineAggregator readResult(StreamInput in) throws IOException {
PercentilesBucketPipelineAggregator result = new PercentilesBucketPipelineAggregator();
result.readFrom(in);
return result;
}
};
public static void registerStreams() {
PipelineAggregatorStreams.registerStream(STREAM, TYPE.stream());
InternalPercentilesBucket.registerStreams();
}
private double[] percents;
private List<Double> data;
private PercentilesBucketPipelineAggregator() {
}
protected PercentilesBucketPipelineAggregator(String name, double[] percents, String[] bucketsPaths, GapPolicy gapPolicy,
ValueFormatter formatter, Map<String, Object> metaData) {
super(name, bucketsPaths, gapPolicy, formatter, metaData);
this.percents = percents;
}
@Override
public Type type() {
return TYPE;
}
@Override
protected void preCollection() {
data = new ArrayList<>(1024);
}
@Override
protected void collectBucketValue(String bucketKey, Double bucketValue) {
data.add(bucketValue);
}
@Override
protected InternalAggregation buildAggregation(List<PipelineAggregator> pipelineAggregators, Map<String, Object> metadata) {
// Perform the sorting and percentile collection now that all the data
// has been collected.
Collections.sort(data);
double[] percentiles = new double[percents.length];
if (data.size() == 0) {
for (int i = 0; i < percents.length; i++) {
percentiles[i] = Double.NaN;
}
} else {
for (int i = 0; i < percents.length; i++) {
int index = (int)((percents[i] / 100.0) * data.size());
percentiles[i] = data.get(index);
}
}
// todo need postCollection() to clean up temp sorted data?
return new InternalPercentilesBucket(name(), percents, percentiles, formatter, pipelineAggregators, metadata);
}
@Override
public void doReadFrom(StreamInput in) throws IOException {
super.doReadFrom(in);
percents = in.readDoubleArray();
}
@Override
public void doWriteTo(StreamOutput out) throws IOException {
super.doWriteTo(out);
out.writeDoubleArray(percents);
}
public static class Factory extends PipelineAggregatorFactory {
private final ValueFormatter formatter;
private final GapPolicy gapPolicy;
private final double[] percents;
public Factory(String name, String[] bucketsPaths, GapPolicy gapPolicy, ValueFormatter formatter, double[] percents) {
super(name, TYPE.name(), bucketsPaths);
this.gapPolicy = gapPolicy;
this.formatter = formatter;
this.percents = percents;
}
@Override
protected PipelineAggregator createInternal(Map<String, Object> metaData) throws IOException {
return new PercentilesBucketPipelineAggregator(name, percents, bucketsPaths, gapPolicy, formatter, metaData);
}
@Override
public void doValidate(AggregatorFactory parent, AggregatorFactory[] aggFactories,
List<PipelineAggregatorFactory> pipelineAggregatorFactories) {
if (bucketsPaths.length != 1) {
throw new IllegalStateException(PipelineAggregator.Parser.BUCKETS_PATH.getPreferredName()
+ " must contain a single entry for aggregation [" + name + "]");
}
for (Double p : percents) {
if (p == null || p < 0.0 || p > 100.0) {
throw new IllegalStateException(PercentilesBucketParser.PERCENTS.getPreferredName()
+ " must only contain non-null doubles from 0.0-100.0 inclusive");
}
}
}
}
}
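buildAggregation uses a simple nearest-rank estimate: sort the collected bucket values once, then index with (int) ((percent / 100.0) * n); empty data is handled by filling NaN for every percent. A worked standalone example of that arithmetic, with invented values:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

class NearestRankSketch {
    public static void main(String[] args) {
        List<Double> data = new ArrayList<>(Arrays.asList(10.0, 20.0, 30.0, 40.0, 50.0));
        Collections.sort(data); // one sort after all buckets are collected

        for (double percent : new double[] { 25.0, 50.0, 99.0 }) {
            int index = (int) ((percent / 100.0) * data.size());     // 1, 2, 4
            System.out.println(percent + " -> " + data.get(index));  // 20.0, 30.0, 50.0
        }
    }
}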


@@ -24,6 +24,8 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorFactory;
 import org.elasticsearch.search.aggregations.pipeline.bucketmetrics.BucketMetricsParser;
 import org.elasticsearch.search.aggregations.support.format.ValueFormatter;

+import java.util.Map;
+
 public class SumBucketParser extends BucketMetricsParser {
     @Override
     public String type() {
@@ -32,7 +34,7 @@ public class SumBucketParser extends BucketMetricsParser {
     @Override
     protected PipelineAggregatorFactory buildFactory(String pipelineAggregatorName, String[] bucketsPaths, GapPolicy gapPolicy,
-            ValueFormatter formatter) {
+            ValueFormatter formatter, Map<String, Object> unparsedParams) {
         return new SumBucketPipelineAggregator.Factory(pipelineAggregatorName, bucketsPaths, gapPolicy, formatter);
     }
 }


@@ -25,7 +25,6 @@ import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
 import org.apache.lucene.search.Filter;
-import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -192,8 +191,7 @@ public class FetchPhase implements SearchPhase {
     private int findRootDocumentIfNested(SearchContext context, LeafReaderContext subReaderContext, int subDocId) throws IOException {
         if (context.mapperService().hasNested()) {
-            BitDocIdSet nonNested = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter()).getDocIdSet(subReaderContext);
-            BitSet bits = nonNested.bits();
+            BitSet bits = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter()).getBitSet(subReaderContext);
             if (!bits.get(subDocId)) {
                 return bits.nextSetBit(subDocId);
             }
@@ -384,8 +382,7 @@ public class FetchPhase implements SearchPhase {
                 continue;
             }

-            BitDocIdSet parentBitSet = context.bitsetFilterCache().getBitDocIdSetFilter(parentFilter).getDocIdSet(subReaderContext);
-            BitSet parentBits = parentBitSet.bits();
+            BitSet parentBits = context.bitsetFilterCache().getBitSetProducer(parentFilter).getBitSet(subReaderContext);
             int offset = 0;
             int nextParent = parentBits.nextSetBit(currentParent);


@ -24,9 +24,8 @@ import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause.Occur; import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.*; import org.apache.lucene.search.*;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
-import org.apache.lucene.util.Bits;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.common.lucene.Lucene;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -117,7 +116,7 @@ public final class InnerHitsContext {
             } else {
                 rawParentFilter = parentObjectMapper.nestedTypeFilter();
             }
-            BitDocIdSetFilter parentFilter = context.bitsetFilterCache().getBitDocIdSetFilter(rawParentFilter);
+            BitSetProducer parentFilter = context.bitsetFilterCache().getBitSetProducer(rawParentFilter);
             Filter childFilter = childObjectMapper.nestedTypeFilter();
             Query q = Queries.filtered(query.query(), new NestedChildrenQuery(parentFilter, childFilter, hitContext));
@@ -147,12 +146,12 @@ public final class InnerHitsContext {
     // A filter that only emits the nested children docs of a specific nested parent doc
     static class NestedChildrenQuery extends Query {

-        private final BitDocIdSetFilter parentFilter;
+        private final BitSetProducer parentFilter;
         private final Filter childFilter;
         private final int docId;
         private final LeafReader leafReader;

-        NestedChildrenQuery(BitDocIdSetFilter parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) {
+        NestedChildrenQuery(BitSetProducer parentFilter, Filter childFilter, FetchSubPhase.HitContext hitContext) {
             this.parentFilter = parentFilter;
             this.childFilter = childFilter;
             this.docId = hitContext.docId();
@@ -202,7 +201,7 @@ public final class InnerHitsContext {
                 return null;
             }
-            final BitSet parents = parentFilter.getDocIdSet(context).bits();
+            final BitSet parents = parentFilter.getBitSet(context);
             final int firstChildDocId = parents.prevSetBit(docId - 1) + 1;
             // A parent doc doesn't have child docs, so we can early exit here:
             if (firstChildDocId == docId) {
@@ -293,12 +292,13 @@ public final class InnerHitsContext {
                 return Lucene.EMPTY_TOP_DOCS;
             }
-            BooleanQuery q = new BooleanQuery();
-            q.add(query.query(), Occur.MUST);
-            // Only include docs that have the current hit as parent
-            q.add(new TermQuery(new Term(field, term)), Occur.MUST);
-            // Only include docs that have this inner hits type
-            q.add(documentMapper.typeFilter(), Occur.MUST);
+            BooleanQuery q = new BooleanQuery.Builder()
+                    .add(query.query(), Occur.MUST)
+                    // Only include docs that have the current hit as parent
+                    .add(new TermQuery(new Term(field, term)), Occur.MUST)
+                    // Only include docs that have this inner hits type
+                    .add(documentMapper.typeFilter(), Occur.MUST)
+                    .build();
             if (size() == 0) {
                 final int count = context.searcher().count(q);
                 return new TopDocs(count, Lucene.EMPTY_SCORE_DOCS, 0);
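
The hunks above capture the consumer side of the Lucene 5.3 move from BitDocIdSetFilter to BitSetProducer: instead of `getDocIdSet(context).bits()`, callers now ask the producer for one (possibly cached, possibly null) BitSet per segment. A minimal sketch of the child-range arithmetic NestedChildrenQuery relies on, assuming only Lucene's public API (the class and method names here are illustrative, not Elasticsearch code):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.util.BitSet;

    // With Lucene's block-join layout a parent doc is indexed after its children,
    // so the children of parentDocId occupy [prevSetBit(parentDocId - 1) + 1, parentDocId - 1].
    public final class NestedChildRange {
        public static int firstChild(BitSetProducer parentFilter, LeafReaderContext leaf, int parentDocId) throws IOException {
            BitSet parents = parentFilter.getBitSet(leaf); // may be null if the segment has no parent docs
            if (parents == null) {
                return parentDocId;
            }
            return parents.prevSetBit(parentDocId - 1) + 1; // equals parentDocId when the parent has no children
        }
    }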

View File

@@ -197,9 +197,10 @@ public class DefaultSearchContext extends SearchContext {
             q.setBoost(query().getBoost());
             parsedQuery(new ParsedQuery(q, parsedQuery()));
         } else {
-            BooleanQuery filtered = new BooleanQuery();
-            filtered.add(query(), Occur.MUST);
-            filtered.add(searchFilter, Occur.FILTER);
+            BooleanQuery filtered = new BooleanQuery.Builder()
+                    .add(query(), Occur.MUST)
+                    .add(searchFilter, Occur.FILTER)
+                    .build();
             parsedQuery(new ParsedQuery(filtered, parsedQuery()));
         }
     }
@@ -216,14 +217,14 @@ public class DefaultSearchContext extends SearchContext {
         if (filter == null && aliasFilter == null) {
             return null;
         }
-        BooleanQuery bq = new BooleanQuery();
+        BooleanQuery.Builder bq = new BooleanQuery.Builder();
         if (filter != null) {
             bq.add(filter, Occur.MUST);
         }
         if (aliasFilter != null) {
             bq.add(aliasFilter, Occur.MUST);
         }
-        return new ConstantScoreQuery(bq);
+        return new ConstantScoreQuery(bq.build());
     }

     @Override
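
This is the recurring migration in this merge: Lucene 5.3 deprecates the mutable `new BooleanQuery()` in favour of the immutable `BooleanQuery.Builder`. A self-contained sketch of the two clause roles used above, in plain Lucene with made-up field names:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause.Occur;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.TermQuery;

    public final class BuilderMigration {
        public static BooleanQuery filtered() {
            // Old style: BooleanQuery bq = new BooleanQuery(); bq.add(q, Occur.MUST); ...
            // New style: clauses are added on a builder and the query is immutable once built.
            return new BooleanQuery.Builder()
                    .add(new TermQuery(new Term("user", "kimchy")), Occur.MUST)   // scoring clause
                    .add(new TermQuery(new Term("type", "tweet")), Occur.FILTER)  // non-scoring filter clause
                    .build();
        }
    }

Occur.FILTER keeps the old filtered-query semantics: the clause restricts matches but contributes nothing to the score.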

View File

@@ -187,9 +187,10 @@ public class QueryPhase implements SearchPhase {
                 // now this gets interesting: since we sort in index-order, we can directly
                 // skip to the desired doc and stop collecting after ${size} matches
                 if (scrollContext.lastEmittedDoc != null) {
-                    BooleanQuery bq = new BooleanQuery();
-                    bq.add(query, BooleanClause.Occur.MUST);
-                    bq.add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER);
+                    BooleanQuery bq = new BooleanQuery.Builder()
+                            .add(query, BooleanClause.Occur.MUST)
+                            .add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER)
+                            .build();
                     query = bq;
                 }
                 searchContext.terminateAfter(numDocs);
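
The trick here is worth spelling out: for scrolls sorted in index order, the next page is expressed as a query rather than as collector state. A hedged sketch of the resulting shape (MinDocQuery is Elasticsearch-internal and matches every doc id greater than or equal to its argument; `query` and `lastEmittedDoc` stand in for the surrounding scroll state):

    // Sketch of the page query built above, not a new API:
    Query nextPage = new BooleanQuery.Builder()
            .add(query, BooleanClause.Occur.MUST)                                      // original query, still scores
            .add(new MinDocQuery(lastEmittedDoc.doc + 1), BooleanClause.Occur.FILTER)  // resume right after the last page
            .build();

Because the filter clause can advance directly to `lastEmittedDoc.doc + 1`, each scroll page avoids re-collecting everything before it.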

View File

@@ -26,7 +26,7 @@ import org.apache.lucene.search.FieldComparator;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
@@ -181,8 +181,7 @@ public class GeoDistanceSortParser implements SortParser {
         final Nested nested;
         if (nestedHelper != null && nestedHelper.getPath() != null) {
-            BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
+            BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
             Filter innerDocumentsFilter;
             if (nestedHelper.filterFound()) {
                 // TODO: use queries instead
@@ -213,7 +212,7 @@ public class GeoDistanceSortParser implements SortParser {
             if (nested == null) {
                 selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE);
             } else {
-                final BitSet rootDocs = nested.rootDocs(context).bits();
+                final BitSet rootDocs = nested.rootDocs(context);
                 final DocIdSet innerDocs = nested.innerDocs(context);
                 selectedValues = finalSortMode.select(distanceValues, Double.MAX_VALUE, rootDocs, innerDocs, context.reader().maxDoc());
             }

View File

@@ -25,7 +25,7 @@ import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.search.Scorer;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -145,7 +145,7 @@ public class ScriptSortParser implements SortParser {
         // If nested_path is specified, then wrap the `fieldComparatorSource` in a `NestedFieldComparatorSource`
         final Nested nested;
         if (nestedHelper != null && nestedHelper.getPath() != null) {
-            BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
+            BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
             Filter innerDocumentsFilter;
             if (nestedHelper.filterFound()) {
                 // TODO: use queries instead

View File

@@ -20,11 +20,12 @@
 package org.elasticsearch.search.sort;

 import com.google.common.collect.ImmutableMap;
 import org.apache.lucene.search.Filter;
 import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.search.Sort;
 import org.apache.lucene.search.SortField;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.lucene.search.Queries;
@@ -251,7 +252,7 @@ public class SortParseElement implements SearchParseElement {
         }
         final Nested nested;
         if (nestedHelper != null && nestedHelper.getPath() != null) {
-            BitDocIdSetFilter rootDocumentsFilter = context.bitsetFilterCache().getBitDocIdSetFilter(Queries.newNonNestedFilter());
+            BitSetProducer rootDocumentsFilter = context.bitsetFilterCache().getBitSetProducer(Queries.newNonNestedFilter());
             Filter innerDocumentsFilter;
             if (nestedHelper.filterFound()) {
                 // TODO: use queries instead
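
GeoDistanceSortParser, ScriptSortParser and SortParseElement all switch to the same one-liner: ask the bitset filter cache for a BitSetProducer over the non-nested (root) docs filter. To make the contract concrete, here is an uncached, from-scratch BitSetProducer in plain Lucene 5.x; the real BitsetFilterCache layers per-segment caching and eviction over essentially this logic (the class name and the lack of caching are my assumptions, not the ES implementation):

    import java.io.IOException;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.search.DocIdSetIterator;
    import org.apache.lucene.search.IndexSearcher;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.Scorer;
    import org.apache.lucene.search.Weight;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.util.BitSet;
    import org.apache.lucene.util.FixedBitSet;

    // Materializes the matching docs of `query` as a FixedBitSet, one segment at a time.
    public final class SimpleBitSetProducer implements BitSetProducer {
        private final Query query;

        public SimpleBitSetProducer(Query query) {
            this.query = query;
        }

        @Override
        public BitSet getBitSet(LeafReaderContext context) throws IOException {
            IndexSearcher searcher = new IndexSearcher(context.reader());
            Weight weight = searcher.createNormalizedWeight(query, false); // scores are not needed
            Scorer scorer = weight.scorer(searcher.getIndexReader().leaves().get(0));
            if (scorer == null) {
                return null; // convention: null for segments without matches
            }
            FixedBitSet bits = new FixedBitSet(context.reader().maxDoc());
            for (int doc = scorer.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = scorer.nextDoc()) {
                bits.set(doc);
            }
            return bits;
        }
    }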

View File

@@ -1,116 +0,0 @@
${packaging.scripts.header}
#
# This script is executed in the post-removal phase
#
#   On Debian,
#       $1=remove   : indicates a removal
#       $1=purge    : indicates a purge (removal plus configuration cleanup)
#
#   On RedHat,
#       $1=1        : indicates a new install
#       $1=2        : indicates an upgrade
SOURCE_ENV_FILE=true
REMOVE_DIRS=false
REMOVE_SERVICE=false
REMOVE_USER_AND_GROUP=false
case "$1" in
# Debian ####################################################
remove)
REMOVE_DIRS=true
REMOVE_SERVICE=true
;;
purge)
REMOVE_USER_AND_GROUP=true
SOURCE_ENV_FILE=false
;;
failed-upgrade|abort-install|abort-upgrade|disappear|upgrade|disappear)
;;
# RedHat ####################################################
0)
REMOVE_DIRS=true
REMOVE_SERVICE=true
REMOVE_USER_AND_GROUP=true
;;
2)
# If $1=1 this is an upgrade
IS_UPGRADE=true
;;
*)
echo "post remove script called with unknown argument \`$1'" >&2
exit 1
;;
esac
# Sets the default values for elasticsearch variables used in this script
ES_USER="${packaging.elasticsearch.user}"
ES_GROUP="${packaging.elasticsearch.group}"
LOG_DIR="${packaging.elasticsearch.log.dir}"
PLUGINS_DIR="${packaging.elasticsearch.plugins.dir}"
PID_DIR="${packaging.elasticsearch.pid.dir}"
DATA_DIR="${packaging.elasticsearch.data.dir}"
# Source the default env file
if [ "$SOURCE_ENV_FILE" = "true" ]; then
ES_ENV_FILE="${packaging.env.file}"
if [ -f "$ES_ENV_FILE" ]; then
. "$ES_ENV_FILE"
fi
fi
if [ "$REMOVE_SERVICE" = "true" ]; then
if command -v systemctl >/dev/null; then
systemctl --no-reload disable elasticsearch.service > /dev/null 2>&1 || true
fi
if command -v chkconfig >/dev/null; then
chkconfig --del elasticsearch 2> /dev/null || true
fi
if command -v update-rc.d >/dev/null; then
update-rc.d elasticsearch remove >/dev/null || true
fi
fi
if [ "$REMOVE_DIRS" = "true" ]; then
if [ -d "$LOG_DIR" ]; then
echo -n "Deleting log directory..."
rm -rf "$LOG_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$LOG_DIR]"
fi
if [ -d "$PLUGINS_DIR" ]; then
echo -n "Deleting plugins directory..."
rm -rf "$PLUGINS_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$PLUGINS_DIR]"
fi
if [ -d "$PID_DIR" ]; then
echo -n "Deleting PID directory..."
rm -rf "$PID_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$PID_DIR]"
fi
# Delete the data directory if and only if empty
if [ -d "$DATA_DIR" ]; then
rmdir --ignore-fail-on-non-empty "$DATA_DIR" && echo " OK" || echo " ERROR: unable to delete directory [$DATA_DIR]"
fi
fi
if [ "$REMOVE_USER_AND_GROUP" = "true" ]; then
if id "$ES_USER" > /dev/null 2>&1 ; then
userdel "$ES_USER"
fi
if getent group "$ES_GROUP" > /dev/null 2>&1 ; then
groupdel "$ES_GROUP"
fi
fi
${packaging.scripts.footer}

View File

@@ -97,10 +97,11 @@ public class BlendedTermQueryTests extends ESTestCase {
             assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue());
         }
         {
-            BooleanQuery query = new BooleanQuery(false);
+            BooleanQuery.Builder query = new BooleanQuery.Builder();
+            query.setDisableCoord(true);
             query.add(new TermQuery(new Term("firstname", "simon")), BooleanClause.Occur.SHOULD);
             query.add(new TermQuery(new Term("surname", "simon")), BooleanClause.Occur.SHOULD);
-            TopDocs search = searcher.search(query, 1);
+            TopDocs search = searcher.search(query.build(), 1);
             ScoreDoc[] scoreDocs = search.scoreDocs;
             assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue());
@@ -150,16 +151,18 @@ public class BlendedTermQueryTests extends ESTestCase {
         IndexSearcher searcher = setSimilarity(newSearcher(reader));
         {
             String[] fields = new String[]{"username", "song"};
-            BooleanQuery query = new BooleanQuery(false);
+            BooleanQuery.Builder query = new BooleanQuery.Builder();
+            query.setDisableCoord(true);
             query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "foo"), 0.1f), BooleanClause.Occur.SHOULD);
             query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "fighters"), 0.1f), BooleanClause.Occur.SHOULD);
             query.add(BlendedTermQuery.dismaxBlendedQuery(toTerms(fields, "generator"), 0.1f), BooleanClause.Occur.SHOULD);
-            TopDocs search = searcher.search(query, 10);
+            TopDocs search = searcher.search(query.build(), 10);
             ScoreDoc[] scoreDocs = search.scoreDocs;
             assertEquals(Integer.toString(0), reader.document(scoreDocs[0].doc).getField("id").stringValue());
         }
         {
-            BooleanQuery query = new BooleanQuery(false);
+            BooleanQuery.Builder query = new BooleanQuery.Builder();
+            query.setDisableCoord(true);
             DisjunctionMaxQuery uname = new DisjunctionMaxQuery(0.0f);
             uname.add(new TermQuery(new Term("username", "foo")));
             uname.add(new TermQuery(new Term("song", "foo")));
@@ -173,7 +176,7 @@ public class BlendedTermQueryTests extends ESTestCase {
             query.add(uname, BooleanClause.Occur.SHOULD);
             query.add(s, BooleanClause.Occur.SHOULD);
             query.add(gen, BooleanClause.Occur.SHOULD);
-            TopDocs search = searcher.search(query, 4);
+            TopDocs search = searcher.search(query.build(), 4);
             ScoreDoc[] scoreDocs = search.scoreDocs;
             assertEquals(Integer.toString(1), reader.document(scoreDocs[0].doc).getField("id").stringValue());
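
One detail beyond the plain builder swap: the disable-coord flag moves from a constructor argument (`new BooleanQuery(disableCoord)`) to a setter on the builder, which is why these tests gain an explicit `setDisableCoord(true)` line. A tiny sketch of the new form:

    import org.apache.lucene.search.BooleanQuery;

    public final class DisableCoordExample {
        public static BooleanQuery build() {
            // Before: BooleanQuery q = new BooleanQuery(disableCoord);
            BooleanQuery.Builder builder = new BooleanQuery.Builder();
            builder.setDisableCoord(true); // switch off the coordination factor, as these tests do
            return builder.build();
        }
    }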

View File

@@ -23,11 +23,6 @@ import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.test.ESTestCase;

 import java.net.InetAddress;
-import java.net.InterfaceAddress;
-import java.net.NetworkInterface;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;

 /**
  * Tests for network service... try to keep them safe depending upon configuration
@@ -87,41 +82,6 @@ public class NetworkServiceTests extends ESTestCase {
         }
     }

-    /**
-     * ensure exception if we bind/publish to broadcast address
-     */
-    public void testBindPublishBroadcast() throws Exception {
-        NetworkService service = new NetworkService(Settings.EMPTY);
-        // collect any broadcast addresses on the system
-        List<InetAddress> addresses = new ArrayList<>();
-        for (NetworkInterface nic : Collections.list(NetworkInterface.getNetworkInterfaces())) {
-            for (InterfaceAddress intf : nic.getInterfaceAddresses()) {
-                InetAddress address = intf.getBroadcast();
-                if (address != null) {
-                    addresses.add(address);
-                }
-            }
-        }
-        // can easily happen (ipv6-only, localhost-only, ...)
-        assumeTrue("test requires broadcast addresses configured", addresses.size() > 0);
-        // make sure we fail on each one
-        for (InetAddress address : addresses) {
-            try {
-                service.resolveBindHostAddress(NetworkAddress.formatAddress(address));
-                fail("should have hit exception for broadcast address: " + address);
-            } catch (IllegalArgumentException e) {
-                assertTrue(e.getMessage().contains("invalid: broadcast"));
-            }
-            try {
-                service.resolvePublishHostAddress(NetworkAddress.formatAddress(address));
-                fail("should have hit exception for broadcast address: " + address);
-            } catch (IllegalArgumentException e) {
-                assertTrue(e.getMessage().contains("invalid: broadcast"));
-            }
-        }
-    }
-
     /**
      * ensure specifying wildcard ipv4 address will bind to all interfaces
     */

View File

@@ -87,6 +87,7 @@ public class AnalysisFactoryTests extends ESTestCase {
         put("commongrams", CommonGramsTokenFilterFactory.class);
         put("commongramsquery", CommonGramsTokenFilterFactory.class);
         put("czechstem", CzechStemTokenFilterFactory.class);
+        put("decimaldigit", DecimalDigitFilterFactory.class);
         put("delimitedpayload", DelimitedPayloadTokenFilterFactory.class);
         put("dictionarycompoundword", DictionaryCompoundWordTokenFilterFactory.class);
         put("edgengram", EdgeNGramTokenFilterFactory.class);
@@ -176,6 +177,8 @@ public class AnalysisFactoryTests extends ESTestCase {
         put("tokenoffsetpayload", Void.class);
         // puts the type into the payload
         put("typeaspayload", Void.class);
+        // fingerprint
+        put("fingerprint", Void.class);
     }};

     public void testTokenFilters() {

View File

@@ -27,26 +27,37 @@ import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.LogByteSizeMergePolicy;
 import org.apache.lucene.index.Term;
-import org.apache.lucene.search.ConstantScoreQuery;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.QueryWrapperFilter;
 import org.apache.lucene.search.TermQuery;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.store.RAMDirectory;
+import org.apache.lucene.util.BitSet;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.test.ESTestCase;
 import org.junit.Test;

+import java.io.IOException;
+
 import static org.hamcrest.Matchers.equalTo;

-/**
- */
 public class BitSetFilterCacheTests extends ESTestCase {

+    private static int matchCount(BitSetProducer producer, IndexReader reader) throws IOException {
+        int count = 0;
+        for (LeafReaderContext ctx : reader.leaves()) {
+            final BitSet bitSet = producer.getBitSet(ctx);
+            if (bitSet != null) {
+                count += bitSet.cardinality();
+            }
+        }
+        return count;
+    }
+
     @Test
     public void testInvalidateEntries() throws Exception {
         IndexWriter writer = new IndexWriter(
@@ -72,13 +83,11 @@ public class BitSetFilterCacheTests extends ESTestCase {
         IndexSearcher searcher = new IndexSearcher(reader);

         BitsetFilterCache cache = new BitsetFilterCache(new Index("test"), Settings.EMPTY);
-        BitDocIdSetFilter filter = cache.getBitDocIdSetFilter(new QueryWrapperFilter(new TermQuery(new Term("field", "value"))));
-        TopDocs docs = searcher.search(new ConstantScoreQuery(filter), 1);
-        assertThat(docs.totalHits, equalTo(3));
+        BitSetProducer filter = cache.getBitSetProducer(new QueryWrapperFilter(new TermQuery(new Term("field", "value"))));
+        assertThat(matchCount(filter, reader), equalTo(3));

         // now cached
-        docs = searcher.search(new ConstantScoreQuery(filter), 1);
-        assertThat(docs.totalHits, equalTo(3));
+        assertThat(matchCount(filter, reader), equalTo(3));

         // There are 3 segments
         assertThat(cache.getLoadedFilters().size(), equalTo(3l));
@@ -87,12 +96,10 @@ public class BitSetFilterCacheTests extends ESTestCase {
         reader = DirectoryReader.open(writer, false);
         searcher = new IndexSearcher(reader);

-        docs = searcher.search(new ConstantScoreQuery(filter), 1);
-        assertThat(docs.totalHits, equalTo(3));
+        assertThat(matchCount(filter, reader), equalTo(3));

         // now cached
-        docs = searcher.search(new ConstantScoreQuery(filter), 1);
-        assertThat(docs.totalHits, equalTo(3));
+        assertThat(matchCount(filter, reader), equalTo(3));

         // Only one segment now, so the size must be 1
         assertThat(cache.getLoadedFilters().size(), equalTo(1l));

View File

@@ -130,7 +130,7 @@ public abstract class AbstractFieldDataTestCase extends ESSingleNodeTestCase {
     protected Nested createNested(Filter parentFilter, Filter childFilter) {
         BitsetFilterCache s = indexService.bitsetFilterCache();
-        return new Nested(s.getBitDocIdSetFilter(parentFilter), s.getBitDocIdSetFilter(childFilter));
+        return new Nested(s.getBitSetProducer(parentFilter), childFilter);
     }

     public void testEmpty() throws Exception {

View File

@@ -614,13 +614,13 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
     public void testGeoPointMapperMerge() throws Exception {
         String stage1Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-                .field("ignore_malformed", true).endObject().endObject()
+                .field("coerce", true).endObject().endObject()
                 .endObject().endObject().string();
         DocumentMapperParser parser = createIndex("test").mapperService().documentMapperParser();
         DocumentMapper stage1 = parser.parse(stage1Mapping);
         String stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", false).field("geohash", true)
-                .field("ignore_malformed", false).endObject().endObject()
+                .field("coerce", false).endObject().endObject()
                 .endObject().endObject().string();
         DocumentMapper stage2 = parser.parse(stage2Mapping);
@@ -629,12 +629,12 @@ public class GeoPointFieldMapperTests extends ESSingleNodeTestCase {
         assertThat(mergeResult.buildConflicts().length, equalTo(2));
         // todo better way of checking conflict?
         assertThat("mapper [point] has different [lat_lon]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
-        assertThat("mapper [point] has different [ignore_malformed]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));
+        assertThat("mapper [point] has different [coerce]", isIn(new ArrayList<>(Arrays.asList(mergeResult.buildConflicts()))));

         // correct mapping and ensure no failures
         stage2Mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                 .startObject("properties").startObject("point").field("type", "geo_point").field("lat_lon", true).field("geohash", true)
-                .field("ignore_malformed", true).endObject().endObject()
+                .field("coerce", true).endObject().endObject()
                 .endObject().endObject().string();
         stage2 = parser.parse(stage2Mapping);
         mergeResult = stage1.merge(stage2.mapping(), false, false);

View File

@@ -44,19 +44,5 @@ public class GeoPointFieldTypeTests extends FieldTypeTestCase {
                 ((GeoPointFieldMapper.GeoPointFieldType)ft).setLatLonEnabled(new DoubleFieldMapper.DoubleFieldType(), new DoubleFieldMapper.DoubleFieldType());
             }
         });
-        addModifier(new Modifier("ignore_malformed", false, true) {
-            @Override
-            public void modify(MappedFieldType ft) {
-                GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft;
-                gft.setIgnoreMalformed(!gft.ignoreMalformed());
-            }
-        });
-        addModifier(new Modifier("coerce", false, true) {
-            @Override
-            public void modify(MappedFieldType ft) {
-                GeoPointFieldMapper.GeoPointFieldType gft = (GeoPointFieldMapper.GeoPointFieldType)ft;
-                gft.setCoerce(!gft.coerce());
-            }
-        });
     }
 }

View File

@@ -211,13 +211,11 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
     protected final QB createTestQueryBuilder() {
         QB query = doCreateTestQueryBuilder();
-        if (supportsBoostAndQueryName()) {
-            if (randomBoolean()) {
-                query.boost(2.0f / randomIntBetween(1, 20));
-            }
-            if (randomBoolean()) {
-                query.queryName(randomAsciiOfLengthBetween(1, 10));
-            }
+        if (randomBoolean()) {
+            query.boost(2.0f / randomIntBetween(1, 20));
+        }
+        if (randomBoolean()) {
+            query.queryName(randomAsciiOfLengthBetween(1, 10));
         }
         return query;
     }
@@ -234,6 +232,11 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
     @Test
     public void testFromXContent() throws IOException {
         QB testQuery = createTestQueryBuilder();
+        //we should not set boost and query name for queries that don't parse them, so we simply reset them to their default values
+        if (supportsBoostAndQueryNameParsing() == false) {
+            testQuery.boost(AbstractQueryBuilder.DEFAULT_BOOST);
+            testQuery.queryName(null);
+        }
         assertParsedQuery(testQuery.toString(), testQuery);
         for (Map.Entry<String, QB> alternateVersion : getAlternateVersions().entrySet()) {
             assertParsedQuery(alternateVersion.getKey(), alternateVersion.getValue());
@@ -288,9 +291,7 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
         assertThat("two equivalent query builders lead to different lucene queries", secondLuceneQuery, equalTo(firstLuceneQuery));

         //if the initial lucene query is null, changing its boost won't have any effect, we shouldn't test that
-        //few queries also don't support boost e.g. wrapper query and filter query
-        //otherwise makes sure that boost is taken into account in toQuery
-        if (firstLuceneQuery != null && supportsBoostAndQueryName()) {
+        if (firstLuceneQuery != null) {
             secondQuery.boost(firstQuery.boost() + 1f + randomFloat());
             Query thirdLuceneQuery = secondQuery.toQuery(context);
             assertThat("modifying the boost doesn't affect the corresponding lucene query", firstLuceneQuery, not(equalTo(thirdLuceneQuery)));
@@ -298,11 +299,11 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
     }

     /**
-     * Few queries allow you to set the boost and queryName but don't do anything with it. This method allows
-     * to disable boost and queryName related tests for those queries. Those queries are easy to identify: their parsers
-     * don't parse `boost` and `_name` as they don't apply to the specific query e.g. filter query or wrapper query
+     * A few queries allow you to set the boost and queryName on the java api, although the corresponding parsers don't parse them as they are not supported.
+     * This method allows subclasses to disable the boost and queryName related tests for those queries. Those queries are easy to identify: their parsers
+     * don't parse `boost` and `_name` as they don't apply to the specific query: filter query, wrapper query and match_none.
      */
-    protected boolean supportsBoostAndQueryName() {
+    protected boolean supportsBoostAndQueryNameParsing() {
         return true;
     }
@@ -317,11 +318,15 @@ public abstract class BaseQueryTestCase<QB extends AbstractQueryBuilder<QB>> ext
             assertThat(namedQuery, equalTo(query));
         }
         if (query != null) {
-            assertThat(query.getBoost(), equalTo(queryBuilder.boost()));
+            assertBoost(queryBuilder, query);
         }
         doAssertLuceneQuery(queryBuilder, query, context);
     }

+    protected void assertBoost(QB queryBuilder, Query query) throws IOException {
+        assertThat(query.getBoost(), equalTo(queryBuilder.boost()));
+    }
+
     /**
      * Checks the result of {@link QueryBuilder#toQuery(QueryShardContext)} given the original {@link QueryBuilder} and {@link QueryShardContext}.
      * Contains the query specific checks to be implemented by subclasses.

View File

@@ -57,12 +57,17 @@ public class IndicesQueryBuilderTests extends BaseQueryTestCase<IndicesQueryBuil
         } else {
             expected = queryBuilder.noMatchQuery().toQuery(context);
         }
-        if (expected != null) {
+        if (expected != null && queryBuilder.boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
             expected.setBoost(queryBuilder.boost());
         }
         assertEquals(query, expected);
     }

+    @Override
+    protected void assertBoost(IndicesQueryBuilder queryBuilder, Query query) throws IOException {
+        //nothing to do here, boost check is already included in the equality check done as part of doAssertLuceneQuery above
+    }
+
     @Test
     public void testValidate() {
         int expectedErrors = 0;

View File

@@ -19,15 +19,18 @@
 package org.elasticsearch.index.query;

+import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.Query;
-import org.elasticsearch.common.lucene.search.Queries;

 import java.io.IOException;

+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.instanceOf;
+
 public class MatchNoneQueryBuilderTests extends BaseQueryTestCase {

     @Override
-    protected boolean supportsBoostAndQueryName() {
+    protected boolean supportsBoostAndQueryNameParsing() {
         return false;
     }
@@ -38,6 +41,8 @@ public class MatchNoneQueryBuilderTests extends BaseQueryTestCase {
     @Override
     protected void doAssertLuceneQuery(AbstractQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
-        assertEquals(query, Queries.newMatchNoDocsQuery());
+        assertThat(query, instanceOf(BooleanQuery.class));
+        BooleanQuery booleanQuery = (BooleanQuery) query;
+        assertThat(booleanQuery.clauses().size(), equalTo(0));
     }
 }

View File

@@ -49,7 +49,7 @@ public class QueryFilterBuilderTests extends BaseQueryTestCase<QueryFilterBuilde
     }

     @Override
-    protected boolean supportsBoostAndQueryName() {
+    protected boolean supportsBoostAndQueryNameParsing() {
         return false;
     }

View File

@@ -832,7 +832,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         IndexQueryParserService queryParser = queryParser();
         Query parsedQuery = queryParser.parse(filteredQuery(termQuery("name.first", "shay"), boolQuery().must(termQuery("name.first", "shay1")).must(termQuery("name.first", "shay4")).mustNot(termQuery("name.first", "shay2")).should(termQuery("name.first", "shay3")))).query();
-        BooleanQuery filter = new BooleanQuery();
+        BooleanQuery.Builder filter = new BooleanQuery.Builder();
         filter.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
         filter.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
         filter.add(new TermQuery(new Term("name.first", "shay2")), Occur.MUST_NOT);
@@ -840,7 +840,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         filter.setMinimumNumberShouldMatch(1);
         Query expected = Queries.filtered(
                 new TermQuery(new Term("name.first", "shay")),
-                filter);
+                filter.build());
         assertEquals(expected, parsedQuery);
     }
@@ -849,7 +849,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/bool-filter.json");
         Query parsedQuery = queryParser.parse(query).query();
-        BooleanQuery filter = new BooleanQuery();
+        BooleanQuery.Builder filter = new BooleanQuery.Builder();
         filter.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
         filter.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
         filter.add(new TermQuery(new Term("name.first", "shay2")), Occur.MUST_NOT);
@@ -857,7 +857,7 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         filter.setMinimumNumberShouldMatch(1);
         Query expected = Queries.filtered(
                 new TermQuery(new Term("name.first", "shay")),
-                filter);
+                filter.build());
         assertEquals(expected, parsedQuery);
     }
@@ -865,12 +865,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
     public void testAndFilteredQueryBuilder() throws IOException {
         IndexQueryParserService queryParser = queryParser();
         Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), andQuery(termQuery("name.first", "shay1"), termQuery("name.first", "shay4")))).query();
-        BooleanQuery and = new BooleanQuery();
+        BooleanQuery.Builder and = new BooleanQuery.Builder();
         and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
         and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
-        builder.add(new BooleanClause(new MatchAllDocsQuery(), Occur.MUST));
-        builder.add(new BooleanClause(and, Occur.FILTER));
+        builder.add(new MatchAllDocsQuery(), Occur.MUST);
+        builder.add(and.build(), Occur.FILTER);
         assertEquals(builder.build(), parsedQuery);
     }
@@ -879,12 +879,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter.json");
         Query parsedQuery = queryParser.parse(query).query();
-        BooleanQuery and = new BooleanQuery();
+        BooleanQuery.Builder and = new BooleanQuery.Builder();
         and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
         and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
         Query expected = Queries.filtered(
                 new TermQuery(new Term("name.first", "shay")),
-                and);
+                and.build());
         assertEquals(expected, parsedQuery);
     }
@@ -893,12 +893,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter-named.json");
         Query parsedQuery = queryParser.parse(query).query();
-        BooleanQuery and = new BooleanQuery();
+        BooleanQuery.Builder and = new BooleanQuery.Builder();
         and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
         and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
         Query expected = Queries.filtered(
                 new TermQuery(new Term("name.first", "shay")),
-                and);
+                and.build());
         assertEquals(expected, parsedQuery);
     }
@@ -907,12 +907,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/and-filter2.json");
         Query parsedQuery = queryParser.parse(query).query();
-        BooleanQuery and = new BooleanQuery();
+        BooleanQuery.Builder and = new BooleanQuery.Builder();
         and.add(new TermQuery(new Term("name.first", "shay1")), Occur.MUST);
         and.add(new TermQuery(new Term("name.first", "shay4")), Occur.MUST);
         Query expected = Queries.filtered(
                 new TermQuery(new Term("name.first", "shay")),
-                and);
+                and.build());
         assertEquals(expected, parsedQuery);
     }
@@ -920,12 +920,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
     public void testOrFilteredQueryBuilder() throws IOException {
         IndexQueryParserService queryParser = queryParser();
         Query parsedQuery = queryParser.parse(filteredQuery(matchAllQuery(), orQuery(termQuery("name.first", "shay1"), termQuery("name.first", "shay4")))).query();
-        BooleanQuery or = new BooleanQuery();
+        BooleanQuery.Builder or = new BooleanQuery.Builder();
         or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD);
         or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD);
         BooleanQuery.Builder builder = new BooleanQuery.Builder();
         builder.add(new MatchAllDocsQuery(), Occur.MUST);
-        builder.add(or, Occur.FILTER);
+        builder.add(or.build(), Occur.FILTER);
         assertEquals(builder.build(), parsedQuery);
     }
@@ -934,12 +934,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter.json");
         Query parsedQuery = queryParser.parse(query).query();
-        BooleanQuery or = new BooleanQuery();
+        BooleanQuery.Builder or = new BooleanQuery.Builder();
         or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD);
         or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD);
         Query expected = Queries.filtered(
                 new TermQuery(new Term("name.first", "shay")),
-                or);
+                or.build());
         assertEquals(expected, parsedQuery);
     }
@@ -948,12 +948,12 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         IndexQueryParserService queryParser = queryParser();
         String query = copyToStringFromClasspath("/org/elasticsearch/index/query/or-filter2.json");
         Query parsedQuery = queryParser.parse(query).query();
-        BooleanQuery or = new BooleanQuery();
+        BooleanQuery.Builder or = new BooleanQuery.Builder();
         or.add(new TermQuery(new Term("name.first", "shay1")), Occur.SHOULD);
         or.add(new TermQuery(new Term("name.first", "shay4")), Occur.SHOULD);
         Query expected = Queries.filtered(
                 new TermQuery(new Term("name.first", "shay")),
-                or);
+                or.build());
         assertEquals(expected, parsedQuery);
     }
@@ -1408,10 +1408,20 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
     @Test
     public void testSpanWithinQueryBuilder() throws IOException {
         IndexQueryParserService queryParser = queryParser();
-        Query expectedQuery = new SpanWithinQuery(new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))),
-                new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))));
-        Query actualQuery = queryParser.parse(spanWithinQuery(spanTermQuery("age", 34), spanTermQuery("age", 35)))
-                .query();
+        SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0)));
+        big.setBoost(2);
+        SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)));
+        little.setBoost(3);
+        Query expectedQuery = new SpanWithinQuery(big, little);
+
+        SpanWithinQueryBuilder spanWithinQueryBuilder = spanWithinQuery(spanTermQuery("age", 34).boost(2), spanTermQuery("age", 35).boost(3));
+        Query actualQuery = queryParser.parse(spanWithinQueryBuilder).query();
+        assertEquals(expectedQuery, actualQuery);
+
+        float boost = randomFloat();
+        expectedQuery.setBoost(boost);
+        spanWithinQueryBuilder.boost(boost);
+        actualQuery = queryParser.parse(spanWithinQueryBuilder).query();
         assertEquals(expectedQuery, actualQuery);
     }
@@ -1428,9 +1438,20 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
     @Test
     public void testSpanContainingQueryBuilder() throws IOException {
         IndexQueryParserService queryParser = queryParser();
-        Query expectedQuery = new SpanContainingQuery(new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0))),
-                new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0))));
-        Query actualQuery = queryParser.parse(spanContainingQuery(spanTermQuery("age", 34), spanTermQuery("age", 35))).query();
+        SpanTermQuery big = new SpanTermQuery(new Term("age", longToPrefixCoded(34, 0)));
+        big.setBoost(2);
+        SpanTermQuery little = new SpanTermQuery(new Term("age", longToPrefixCoded(35, 0)));
+        little.setBoost(3);
+        Query expectedQuery = new SpanContainingQuery(big, little);
+
+        SpanContainingQueryBuilder spanContainingQueryBuilder = spanContainingQuery(spanTermQuery("age", 34).boost(2), spanTermQuery("age", 35).boost(3));
+        Query actualQuery = queryParser.parse(spanContainingQueryBuilder).query();
+        assertEquals(expectedQuery, actualQuery);
+
+        float boost = randomFloat();
+        expectedQuery.setBoost(boost);
+        spanContainingQueryBuilder.boost(boost);
+        actualQuery = queryParser.parse(spanContainingQueryBuilder).query();
         assertEquals(expectedQuery, actualQuery);
     }
@@ -2513,17 +2534,42 @@ public class SimpleIndexQueryParserTests extends ESSingleNodeTestCase {
         try (Engine.Searcher searcher = indexService.shardSafe(0).acquireSearcher("test")) {
             Query rewrittenQuery = searcher.searcher().rewrite(parsedQuery);

-            BooleanQuery expected = new BooleanQuery();
+            BooleanQuery.Builder expected = new BooleanQuery.Builder();
             expected.add(new TermQuery(new Term("foobar", "banon")), Occur.SHOULD);
             TermQuery tq1 = new TermQuery(new Term("name.first", "banon"));
             tq1.setBoost(2);
             TermQuery tq2 = new TermQuery(new Term("name.last", "banon"));
             tq2.setBoost(3);
             expected.add(new DisjunctionMaxQuery(Arrays.<Query>asList(tq1, tq2), 0f), Occur.SHOULD);
-            assertEquals(expected, rewrittenQuery);
+            assertEquals(expected.build(), rewrittenQuery);
         }
     }

+    @Test
+    public void testSimpleQueryString() throws Exception {
+        IndexQueryParserService queryParser = queryParser();
+        String query = copyToStringFromClasspath("/org/elasticsearch/index/query/simple-query-string.json");
+        Query parsedQuery = queryParser.parse(query).query();
+        assertThat(parsedQuery, instanceOf(BooleanQuery.class));
+    }
+
+    @Test
+    public void testSimpleQueryStringBoost() throws Exception {
+        IndexQueryParserService queryParser = queryParser();
+
+        SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
+        simpleQueryStringBuilder.field("body", 5);
+        Query parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query();
+        assertThat(parsedQuery, instanceOf(TermQuery.class));
+        assertThat(parsedQuery.getBoost(), equalTo(5f));
+
+        simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
+        simpleQueryStringBuilder.field("body", 5);
+        simpleQueryStringBuilder.boost(2);
+        parsedQuery = queryParser.parse(simpleQueryStringBuilder.toString()).query();
+        assertThat(parsedQuery, instanceOf(TermQuery.class));
+        assertThat(parsedQuery.getBoost(), equalTo(10f));
+    }
+
     @Test
     public void testMatchWithFuzzyTranspositions() throws Exception {
         IndexQueryParserService queryParser = queryParser();

View File

@@ -135,7 +135,7 @@ public class SimpleQueryStringBuilderTests extends BaseQueryTestCase<SimpleQuery
     @Test
     public void testDefaultEmptyComplainFlags() {
         SimpleQueryStringBuilder qb = new SimpleQueryStringBuilder("The quick brown fox.");
-        qb.flags(new SimpleQueryStringFlag[] {});
+        qb.flags(new SimpleQueryStringFlag[]{});
         assertEquals("Setting flags to empty should result in returning to default value.", SimpleQueryStringBuilder.DEFAULT_FLAGS,
                 qb.flags());
     }
@@ -311,6 +311,13 @@ public class SimpleQueryStringBuilderTests extends BaseQueryTestCase<SimpleQuery
         }
     }

+    @Override
+    protected void assertBoost(SimpleQueryStringBuilder queryBuilder, Query query) throws IOException {
+        //boost may get parsed from the random query, we then combine the main boost with that one coming from lucene
+        //instead of trying to reparse the query and guess what the boost should be, we delegate boost checks to specific boost tests below
+    }
+
     private int shouldClauses(BooleanQuery query) {
         int result = 0;
         for (BooleanClause c : query.clauses()) {
@@ -320,4 +327,22 @@ public class SimpleQueryStringBuilderTests extends BaseQueryTestCase<SimpleQuery
         }
         return result;
     }
+
+    @Test
+    public void testToQueryBoost() throws IOException {
+        assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
+        QueryShardContext shardContext = createShardContext();
+        SimpleQueryStringBuilder simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
+        simpleQueryStringBuilder.field(STRING_FIELD_NAME, 5);
+        Query query = simpleQueryStringBuilder.toQuery(shardContext);
+        assertThat(query, instanceOf(TermQuery.class));
+        assertThat(query.getBoost(), equalTo(5f));
+
+        simpleQueryStringBuilder = new SimpleQueryStringBuilder("test");
+        simpleQueryStringBuilder.field(STRING_FIELD_NAME, 5);
+        simpleQueryStringBuilder.boost(2);
+        query = simpleQueryStringBuilder.toQuery(shardContext);
+        assertThat(query, instanceOf(TermQuery.class));
+        assertThat(query.getBoost(), equalTo(10f));
+    }
 }
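
Read together, testSimpleQueryStringBoost and testToQueryBoost pin down one contract: a per-field boost and the query-level boost multiply instead of overwriting each other. In the worked case above, assuming the single-term query collapses to one TermQuery:

    float fieldBoost = 5f;                    // simpleQueryStringBuilder.field(..., 5)
    float queryBoost = 2f;                    // simpleQueryStringBuilder.boost(2)
    float expected = fieldBoost * queryBoost; // 10f, the value both tests assert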

View File

@@ -25,6 +25,7 @@ import org.junit.Test;

 import java.io.IOException;

+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;

 public class SpanContainingQueryBuilderTests extends BaseQueryTestCase<SpanContainingQueryBuilder> {
@@ -40,6 +41,16 @@ public class SpanContainingQueryBuilderTests extends BaseQueryTestCase<SpanConta
         assertThat(query, instanceOf(SpanContainingQuery.class));
     }

+    @Override
+    protected void assertBoost(SpanContainingQueryBuilder queryBuilder, Query query) throws IOException {
+        if (queryBuilder.boost() == AbstractQueryBuilder.DEFAULT_BOOST) {
+            //lucene default behaviour
+            assertThat(query.getBoost(), equalTo(queryBuilder.bigQuery().boost()));
+        } else {
+            assertThat(query.getBoost(), equalTo(queryBuilder.boost()));
+        }
+    }
+
     @Test
     public void testValidate() {
         int totalExpectedErrors = 0;

View File

@@ -25,6 +25,7 @@ import org.junit.Test;

 import java.io.IOException;

+import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.CoreMatchers.instanceOf;

 public class SpanWithinQueryBuilderTests extends BaseQueryTestCase<SpanWithinQueryBuilder> {
@@ -40,6 +41,16 @@ public class SpanWithinQueryBuilderTests extends BaseQueryTestCase<SpanWithinQue
         assertThat(query, instanceOf(SpanWithinQuery.class));
     }

+    @Override
+    protected void assertBoost(SpanWithinQueryBuilder queryBuilder, Query query) throws IOException {
+        if (queryBuilder.boost() == AbstractQueryBuilder.DEFAULT_BOOST) {
+            //lucene default behaviour
+            assertThat(query.getBoost(), equalTo(queryBuilder.littleQuery().boost()));
+        } else {
+            assertThat(query.getBoost(), equalTo(queryBuilder.boost()));
+        }
+    }
+
     @Test
     public void testValidate() {
         int totalExpectedErrors = 0;
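
The two assertBoost overrides encode an asymmetry in Lucene's span queries: with no explicit boost on the builder, a SpanContainingQuery reports the boost of its `big` clause while a SpanWithinQuery reports the boost of its `little` clause, in each case the clause whose spans are actually returned. A small probe of that behaviour as these tests describe it (the commented expectations come from the assertions above, not from Lucene's documentation):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.spans.SpanContainingQuery;
    import org.apache.lucene.search.spans.SpanTermQuery;
    import org.apache.lucene.search.spans.SpanWithinQuery;

    public final class SpanBoostProbe {
        public static void main(String[] args) {
            SpanTermQuery big = new SpanTermQuery(new Term("f", "a"));
            big.setBoost(2f);
            SpanTermQuery little = new SpanTermQuery(new Term("f", "b"));
            little.setBoost(3f);
            // Per the tests above: containing -> big's boost, within -> little's boost.
            System.out.println(new SpanContainingQuery(big, little).getBoost());
            System.out.println(new SpanWithinQuery(big, little).getBoost());
        }
    }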

View File

@@ -32,7 +32,7 @@ import static org.hamcrest.Matchers.is;
 public class WrapperQueryBuilderTests extends BaseQueryTestCase<WrapperQueryBuilder> {
 
     @Override
-    protected boolean supportsBoostAndQueryName() {
+    protected boolean supportsBoostAndQueryNameParsing() {
         return false;
     }
 
@@ -56,16 +56,20 @@ public class WrapperQueryBuilderTests extends BaseQueryTestCase<WrapperQueryBuilder> {
         try (XContentParser qSourceParser = XContentFactory.xContent(queryBuilder.source()).createParser(queryBuilder.source())) {
             final QueryShardContext contextCopy = new QueryShardContext(context.index(), context.indexQueryParserService());
             contextCopy.reset(qSourceParser);
-            QueryBuilder result = contextCopy.parseContext().parseInnerQueryBuilder();
-            context.combineNamedQueries(contextCopy);
-            Query expected = result.toQuery(context);
-            if (expected != null) {
-                expected.setBoost(AbstractQueryBuilder.DEFAULT_BOOST);
+            QueryBuilder<?> innerQuery = contextCopy.parseContext().parseInnerQueryBuilder();
+            Query expected = innerQuery.toQuery(context);
+            if (expected != null && queryBuilder.boost() != AbstractQueryBuilder.DEFAULT_BOOST) {
+                expected.setBoost(queryBuilder.boost());
             }
             assertThat(query, equalTo(expected));
         }
     }
 
+    @Override
+    protected void assertBoost(WrapperQueryBuilder queryBuilder, Query query) throws IOException {
+        // nothing to do here, boost check is already included in the equality check done as part of doAssertLuceneQuery above
+    }
+
     @Test
     public void testValidate() {
         WrapperQueryBuilder wrapperQueryBuilder = new WrapperQueryBuilder((byte[]) null);
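
Note: WrapperQueryBuilder carries its inner query as raw source and only parses it when the builder is converted on the shard, which is why supportsBoostAndQueryNameParsing() returns false here and the boost comparison happens inside the equality check. A builder-side usage sketch via the public QueryBuilders factory; the wrapped JSON is illustrative only:

    import org.elasticsearch.index.query.QueryBuilders;
    import org.elasticsearch.index.query.WrapperQueryBuilder;

    // Sketch only: the wrapped source stays opaque until toQuery() runs on the
    // shard, where parseInnerQueryBuilder() turns it into a real query builder.
    public class WrapperSketch {
        public static void main(String[] args) {
            WrapperQueryBuilder wrapper =
                    QueryBuilders.wrapperQuery("{\"term\": {\"user\": \"kimchy\"}}");
        }
    }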

core/src/test/java/org/elasticsearch/index/search/child/AbstractChildTestCase.java

@@ -25,7 +25,7 @@ import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreDoc;
 import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.search.join.BitSetProducer;
 import org.apache.lucene.util.BitDocIdSet;
 import org.apache.lucene.util.BitSet;
 import org.elasticsearch.Version;
@@ -135,8 +135,8 @@ public abstract class AbstractChildTestCase extends ESSingleNodeTestCase {
         }
     }
 
-    static BitDocIdSetFilter wrapWithBitSetFilter(Filter filter) {
-        return SearchContext.current().bitsetFilterCache().getBitDocIdSetFilter(filter);
+    static BitSetProducer wrapWithBitSetFilter(Filter filter) {
+        return SearchContext.current().bitsetFilterCache().getBitSetProducer(filter);
     }
 
     static Query parseQuery(QueryBuilder queryBuilder) throws IOException {
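
Note: wrapWithBitSetFilter now returns Lucene 5.3's BitSetProducer, which replaces the removed BitDocIdSetFilter and yields one (possibly null) BitSet per index segment. A self-contained sketch against the plain Lucene 5.3 join module, with QueryBitSetProducer standing in for the cached producer that bitsetFilterCache().getBitSetProducer() hands back:

    import org.apache.lucene.analysis.standard.StandardAnalyzer;
    import org.apache.lucene.document.Document;
    import org.apache.lucene.document.Field;
    import org.apache.lucene.document.StringField;
    import org.apache.lucene.index.DirectoryReader;
    import org.apache.lucene.index.IndexWriter;
    import org.apache.lucene.index.IndexWriterConfig;
    import org.apache.lucene.index.LeafReaderContext;
    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.TermQuery;
    import org.apache.lucene.search.join.BitSetProducer;
    import org.apache.lucene.search.join.QueryBitSetProducer;
    import org.apache.lucene.store.Directory;
    import org.apache.lucene.store.RAMDirectory;
    import org.apache.lucene.util.BitSet;

    // Sketch only: index one "parent" document, then pull the per-segment
    // parent bit set the way the child queries below consume it.
    public class BitSetProducerSketch {
        public static void main(String[] args) throws Exception {
            Directory dir = new RAMDirectory();
            try (IndexWriter writer = new IndexWriter(dir, new IndexWriterConfig(new StandardAnalyzer()))) {
                Document parent = new Document();
                parent.add(new StringField("type", "parent", Field.Store.NO));
                writer.addDocument(parent);
            }
            try (DirectoryReader reader = DirectoryReader.open(dir)) {
                BitSetProducer parents =
                        new QueryBitSetProducer(new TermQuery(new Term("type", "parent")));
                for (LeafReaderContext leaf : reader.leaves()) {
                    BitSet bits = parents.getBitSet(leaf); // null if nothing matches in this segment
                    System.out.println(bits == null ? 0 : bits.cardinality());
                }
            }
        }
    }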

core/src/test/java/org/elasticsearch/index/search/child/ChildrenConstantScoreQueryTests.java

@@ -20,13 +20,28 @@ package org.elasticsearch.index.search.child;
 import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
-import org.apache.lucene.index.*;
-import org.apache.lucene.search.*;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
+import org.apache.lucene.index.DirectoryReader;
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.LeafReader;
+import org.apache.lucene.index.PostingsEnum;
+import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.SlowCompositeReaderWrapper;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Filter;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryUtils;
+import org.apache.lucene.search.QueryWrapperFilter;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
@@ -51,7 +66,11 @@ import java.util.NavigableSet;
 import java.util.Random;
 import java.util.TreeSet;
 
-import static org.elasticsearch.index.query.QueryBuilders.*;
+import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
+import static org.elasticsearch.index.query.QueryBuilders.filteredQuery;
+import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
+import static org.elasticsearch.index.query.QueryBuilders.notQuery;
+import static org.elasticsearch.index.query.QueryBuilders.termQuery;
 import static org.hamcrest.Matchers.equalTo;
 
 public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
@@ -73,7 +92,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
         Query childQuery = new TermQuery(new Term("field", "value"));
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
-        BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
+        Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
         Query query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, childQuery, "parent", "child", parentFilter, 12, wrapWithBitSetFilter(Queries.newNonNestedFilter()));
         QueryUtils.check(query);
     }
@@ -106,7 +125,7 @@ public class ChildrenConstantScoreQueryTests extends AbstractChildTestCase {
         );
 
         TermQuery childQuery = new TermQuery(new Term("field1", "value" + (1 + random().nextInt(3))));
-        BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
+        Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
         int shortCircuitParentDocSet = random().nextInt(5);
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
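
Note: the wildcard static import was narrowed to the five QueryBuilders factory methods this test actually uses. A short sketch of the DSL those imports support (type and field names illustrative only):

    import org.elasticsearch.index.query.QueryBuilder;

    import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
    import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
    import static org.elasticsearch.index.query.QueryBuilders.notQuery;
    import static org.elasticsearch.index.query.QueryBuilders.termQuery;

    // Sketch only: the shape of parent/child queries these tests parse.
    public class QueryDslSketch {
        public static void main(String[] args) {
            QueryBuilder query = constantScoreQuery(
                    hasChildQuery("child", notQuery(termQuery("field1", "value1"))));
            System.out.println(query);
        }
    }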

core/src/test/java/org/elasticsearch/index/search/child/ChildrenQueryTests.java

@@ -22,6 +22,7 @@ import com.carrotsearch.hppc.FloatArrayList;
 import com.carrotsearch.hppc.IntHashSet;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
+import com.carrotsearch.randomizedtesting.generators.RandomInts;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.DoubleField;
@@ -29,7 +30,6 @@ import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
@@ -78,7 +78,7 @@ public class ChildrenQueryTests extends AbstractChildTestCase {
         ScoreType scoreType = ScoreType.values()[random().nextInt(ScoreType.values().length)];
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
-        BitDocIdSetFilter parentFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent"))));
+        Filter parentFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "parent")));
         int minChildren = random().nextInt(10);
         int maxChildren = scaledRandomIntBetween(minChildren, 10);
         Query query = new ChildrenQuery(parentChildIndexFieldData, "parent", "child", parentFilter, childQuery, scoreType, minChildren,

core/src/test/java/org/elasticsearch/index/search/child/ParentConstantScoreQueryTests.java

@@ -20,13 +20,13 @@ package org.elasticsearch.index.search.child;
 import com.carrotsearch.hppc.IntIntHashMap;
 import com.carrotsearch.hppc.ObjectObjectHashMap;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.document.Document;
 import org.apache.lucene.document.Field;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.*;
 import org.apache.lucene.search.*;
-import org.apache.lucene.search.join.BitDocIdSetFilter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.Bits;
 import org.apache.lucene.util.FixedBitSet;
@@ -73,7 +73,7 @@ public class ParentConstantScoreQueryTests extends AbstractChildTestCase {
         Query parentQuery = new TermQuery(new Term("field", "value"));
         ParentFieldMapper parentFieldMapper = SearchContext.current().mapperService().documentMapper("child").parentFieldMapper();
         ParentChildIndexFieldData parentChildIndexFieldData = SearchContext.current().fieldData().getForField(parentFieldMapper.fieldType());
-        BitDocIdSetFilter childrenFilter = wrapWithBitSetFilter(new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child"))));
+        Filter childrenFilter = new QueryWrapperFilter(new TermQuery(new Term(TypeFieldMapper.NAME, "child")));
         Query query = new ParentConstantScoreQuery(parentChildIndexFieldData, parentQuery, "parent", childrenFilter);
         QueryUtils.check(query);
     }

Some files were not shown because too many files have changed in this diff.