aliases: Parse aliases at search time and never cache parsed alias filters

The workaround for resolving `now` doesn't need to be used for aliases, because alias filters are parsed at search time. However, it can't be removed, because the percolator relies on it.

Parent/child can be specified again in alias filters; this works again because alias filters are now parsed at search time. Parent/child will also use the late-query-parse workaround, to make sure the final preparations happen when the search context is around. This allows the aliases API to validate parent/child queries without failing due to a missing search context.

Closes #10485
This commit is contained in:
Martijn van Groningen 2015-06-30 01:11:27 +02:00
parent 6b4c51f442
commit 53874bf5a6
17 changed files with 196 additions and 211 deletions

View File

@ -1,58 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.aliases;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.compress.CompressedXContent;
/**
 * Immutable value holder describing one index alias: its name, the raw
 * (compressed) filter source it was registered with, and the Lucene query
 * that source was parsed into.
 */
public class IndexAlias {

    private final String alias;
    private final CompressedXContent filter;
    private final Query parsedFilter;

    /**
     * @param alias        the alias name (never {@code null})
     * @param filter       raw filter source, or {@code null} for an unfiltered alias
     * @param parsedFilter parsed query form of {@code filter}, or {@code null}
     *                     when there is no filter
     */
    public IndexAlias(String alias, @Nullable CompressedXContent filter, @Nullable Query parsedFilter) {
        this.alias = alias;
        this.filter = filter;
        this.parsedFilter = parsedFilter;
    }

    /** Returns the alias name. */
    public String alias() {
        return alias;
    }

    /** Returns the raw filter source, or {@code null} if the alias has no filter. */
    @Nullable
    public CompressedXContent filter() {
        return filter;
    }

    /** Returns the parsed filter query, or {@code null} if the alias has no filter. */
    @Nullable
    public Query parsedFilter() {
        return parsedFilter;
    }
}

View File

@ -22,11 +22,12 @@ package org.elasticsearch.index.aliases;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
@ -38,16 +39,14 @@ import org.elasticsearch.indices.AliasFilterParsingException;
import org.elasticsearch.indices.InvalidAliasNameException;
import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
/**
*
*/
public class IndexAliasesService extends AbstractIndexComponent implements Iterable<IndexAlias> {
public class IndexAliasesService extends AbstractIndexComponent {
private final IndexQueryParserService indexQueryParser;
private final Map<String, IndexAlias> aliases = ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency();
private volatile ImmutableOpenMap<String, AliasMetaData> aliases = ImmutableOpenMap.of();
@Inject
public IndexAliasesService(Index index, @IndexSettings Settings indexSettings, IndexQueryParserService indexQueryParser) {
@ -55,54 +54,35 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera
this.indexQueryParser = indexQueryParser;
}
public boolean hasAlias(String alias) {
return aliases.containsKey(alias);
}
public IndexAlias alias(String alias) {
return aliases.get(alias);
}
public IndexAlias create(String alias, @Nullable CompressedXContent filter) {
return new IndexAlias(alias, filter, parse(alias, filter));
}
public void add(String alias, @Nullable CompressedXContent filter) {
add(new IndexAlias(alias, filter, parse(alias, filter)));
}
public void addAll(Map<String, IndexAlias> aliases) {
this.aliases.putAll(aliases);
}
/**
* Returns the filter associated with listed filtering aliases.
* <p/>
* <p>The list of filtering aliases should be obtained by calling MetaData.filteringAliases.
* Returns <tt>null</tt> if no filtering is required.</p>
*/
public Query aliasFilter(String... aliases) {
if (aliases == null || aliases.length == 0) {
public Query aliasFilter(String... aliasNames) {
if (aliasNames == null || aliasNames.length == 0) {
return null;
}
if (aliases.length == 1) {
IndexAlias indexAlias = alias(aliases[0]);
if (indexAlias == null) {
if (aliasNames.length == 1) {
AliasMetaData alias = this.aliases.get(aliasNames[0]);
if (alias == null) {
// This shouldn't happen unless alias disappeared after filteringAliases was called.
throw new InvalidAliasNameException(index, aliases[0], "Unknown alias name was passed to alias Filter");
throw new InvalidAliasNameException(index, aliasNames[0], "Unknown alias name was passed to alias Filter");
}
return indexAlias.parsedFilter();
return parse(alias);
} else {
// we need to bench here a bit, to see maybe it makes sense to use OrFilter
BooleanQuery combined = new BooleanQuery();
for (String alias : aliases) {
IndexAlias indexAlias = alias(alias);
if (indexAlias == null) {
for (String aliasName : aliasNames) {
AliasMetaData alias = this.aliases.get(aliasName);
if (alias == null) {
// This shouldn't happen unless alias disappeared after filteringAliases was called.
throw new InvalidAliasNameException(index, aliases[0], "Unknown alias name was passed to alias Filter");
throw new InvalidAliasNameException(index, aliasNames[0], "Unknown alias name was passed to alias Filter");
}
if (indexAlias.parsedFilter() != null) {
combined.add(indexAlias.parsedFilter(), BooleanClause.Occur.SHOULD);
Query parsedFilter = parse(alias);
if (parsedFilter != null) {
combined.add(parsedFilter, BooleanClause.Occur.SHOULD);
} else {
// The filter might be null only if filter was removed after filteringAliases was called
return null;
@ -112,31 +92,36 @@ public class IndexAliasesService extends AbstractIndexComponent implements Itera
}
}
private void add(IndexAlias indexAlias) {
aliases.put(indexAlias.alias(), indexAlias);
public void setAliases(ImmutableOpenMap<String, AliasMetaData> aliases) {
this.aliases = aliases;
}
public void remove(String alias) {
aliases.remove(alias);
}
private Query parse(String alias, CompressedXContent filter) {
if (filter == null) {
Query parse(AliasMetaData alias) {
if (alias.filter() == null) {
return null;
}
try {
byte[] filterSource = filter.uncompressed();
byte[] filterSource = alias.filter().uncompressed();
try (XContentParser parser = XContentFactory.xContent(filterSource).createParser(filterSource)) {
ParsedQuery parsedFilter = indexQueryParser.parseInnerFilter(parser);
return parsedFilter == null ? null : parsedFilter.query();
}
} catch (IOException ex) {
throw new AliasFilterParsingException(index, alias, "Invalid alias filter", ex);
throw new AliasFilterParsingException(index, alias.getAlias(), "Invalid alias filter", ex);
}
}
@Override
public Iterator<IndexAlias> iterator() {
return aliases.values().iterator();
// Used by tests:
void add(String alias, @Nullable CompressedXContent filter) {
AliasMetaData aliasMetaData = AliasMetaData.builder(alias).filter(filter).build();
aliases = ImmutableOpenMap.builder(aliases).fPut(alias, aliasMetaData).build();
}
boolean hasAlias(String alias) {
return aliases.containsKey(alias);
}
void remove(String alias) {
aliases = ImmutableOpenMap.builder(aliases).fRemove(alias).build();
}
}

View File

@ -20,7 +20,6 @@
package org.elasticsearch.index.mapper.core;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType.NumericType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
@ -208,8 +207,37 @@ public class DateFieldMapper extends NumberFieldMapper {
@Override
public Query rewrite(IndexReader reader) throws IOException {
Query query = innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
return query.rewrite(reader);
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
}
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
LateParsingQuery that = (LateParsingQuery) o;
if (includeLower != that.includeLower) return false;
if (includeUpper != that.includeUpper) return false;
if (lowerTerm != null ? !lowerTerm.equals(that.lowerTerm) : that.lowerTerm != null) return false;
if (upperTerm != null ? !upperTerm.equals(that.upperTerm) : that.upperTerm != null) return false;
if (timeZone != null ? !timeZone.equals(that.timeZone) : that.timeZone != null) return false;
return !(forcedDateParser != null ? !forcedDateParser.equals(that.forcedDateParser) : that.forcedDateParser != null);
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + (lowerTerm != null ? lowerTerm.hashCode() : 0);
result = 31 * result + (upperTerm != null ? upperTerm.hashCode() : 0);
result = 31 * result + (includeLower ? 1 : 0);
result = 31 * result + (includeUpper ? 1 : 0);
result = 31 * result + (timeZone != null ? timeZone.hashCode() : 0);
result = 31 * result + (forcedDateParser != null ? forcedDateParser.hashCode() : 0);
return result;
}
@Override
@ -384,12 +412,7 @@ public class DateFieldMapper extends NumberFieldMapper {
}
public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser, @Nullable QueryParseContext context) {
// If the current search context is null we're parsing percolator query or a index alias filter.
if (SearchContext.current() == null) {
return new LateParsingQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
} else {
return innerRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, forcedDateParser);
}
}
private Query innerRangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable DateTimeZone timeZone, @Nullable DateMathParser forcedDateParser) {

View File

@ -198,12 +198,6 @@ public class HasChildQueryParser implements QueryParser {
}
public static Query joinUtilHelper(String parentType, ParentChildIndexFieldData parentChildIndexFieldData, Query toQuery, ScoreType scoreType, Query innerQuery, int minChildren, int maxChildren) throws IOException {
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new IllegalStateException("Search context is required to be set");
}
String joinField = ParentFieldMapper.joinField(parentType);
ScoreMode scoreMode;
// TODO: move entirely over from ScoreType to org.apache.lucene.join.ScoreMode, when we drop the 1.x parent child code.
switch (scoreType) {
@ -225,15 +219,73 @@ public class HasChildQueryParser implements QueryParser {
default:
throw new UnsupportedOperationException("score type [" + scoreType + "] not supported");
}
IndexReader indexReader = searchContext.searcher().getIndexReader();
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader);
MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
// 0 in pre 2.x p/c impl means unbounded
if (maxChildren == 0) {
maxChildren = Integer.MAX_VALUE;
}
return new LateParsingQuery(toQuery, innerQuery, minChildren, maxChildren, parentType, scoreMode, parentChildIndexFieldData);
}
final static class LateParsingQuery extends Query {
private final Query toQuery;
private final Query innerQuery;
private final int minChildren;
private final int maxChildren;
private final String parentType;
private final ScoreMode scoreMode;
private final ParentChildIndexFieldData parentChildIndexFieldData;
private final Object identity = new Object();
LateParsingQuery(Query toQuery, Query innerQuery, int minChildren, int maxChildren, String parentType, ScoreMode scoreMode, ParentChildIndexFieldData parentChildIndexFieldData) {
this.toQuery = toQuery;
this.innerQuery = innerQuery;
this.minChildren = minChildren;
this.maxChildren = maxChildren;
this.parentType = parentType;
this.scoreMode = scoreMode;
this.parentChildIndexFieldData = parentChildIndexFieldData;
}
@Override
public Query rewrite(IndexReader reader) throws IOException {
SearchContext searchContext = SearchContext.current();
if (searchContext == null) {
throw new IllegalArgumentException("Search context is required to be set");
}
String joinField = ParentFieldMapper.joinField(parentType);
IndexReader indexReader = searchContext.searcher().getIndexReader();
IndexSearcher indexSearcher = new IndexSearcher(indexReader);
IndexParentChildFieldData indexParentChildFieldData = parentChildIndexFieldData.loadGlobal(indexReader);
MultiDocValues.OrdinalMap ordinalMap = ParentChildIndexFieldData.getOrdinalMap(indexParentChildFieldData, parentType);
return JoinUtil.createJoinQuery(joinField, innerQuery, toQuery, indexSearcher, scoreMode, ordinalMap, minChildren, maxChildren);
}
// Even though we only cache rewritten queries it is good to let all queries implement hashCode() and equals():
// We can't check for actual equality here, since we would need the IndexReader for that, but
// it isn't available in all cases during query parse time, so instead rely on identity:
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
if (!super.equals(o)) return false;
LateParsingQuery that = (LateParsingQuery) o;
return identity.equals(that.identity);
}
@Override
public int hashCode() {
int result = super.hashCode();
result = 31 * result + identity.hashCode();
return result;
}
@Override
public String toString(String s) {
return "LateParsingQuery {parentType=" + parentType + "}";
}
}
}

View File

@ -388,4 +388,5 @@ public class QueryParseContext {
public Version indexVersionCreated() {
return indexVersionCreated;
}
}

View File

@ -20,11 +20,9 @@
package org.elasticsearch.indices.cluster;
import com.carrotsearch.hppc.IntHashSet;
import com.carrotsearch.hppc.ObjectContainer;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.google.common.base.Predicate;
import com.google.common.collect.Lists;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
@ -32,7 +30,6 @@ import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
import org.elasticsearch.cluster.action.shard.ShardStateAction;
import org.elasticsearch.cluster.metadata.AliasMetaData;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
@ -49,17 +46,12 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.IndexShardAlreadyExistsException;
import org.elasticsearch.index.IndexShardMissingException;
import org.elasticsearch.index.aliases.IndexAlias;
import org.elasticsearch.index.aliases.IndexAliasesService;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.IndexShardRecoveryException;
import org.elasticsearch.index.shard.StoreRecoveryService;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.settings.IndexSettingsService;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardState;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.*;
import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.indices.recovery.RecoveryFailedException;
import org.elasticsearch.indices.recovery.RecoveryState;
@ -67,13 +59,11 @@ import org.elasticsearch.indices.recovery.RecoveryStatus;
import org.elasticsearch.indices.recovery.RecoveryTarget;
import org.elasticsearch.threadpool.ThreadPool;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import static com.google.common.collect.Maps.newHashMap;
import static org.elasticsearch.ExceptionsHelper.detailedMessage;
/**
@ -467,45 +457,10 @@ public class IndicesClusterStateService extends AbstractLifecycleComponent<Indic
continue;
}
IndexAliasesService indexAliasesService = indexService.aliasesService();
processAliases(index, indexMetaData.aliases().values(), indexAliasesService);
// go over and remove aliases
for (IndexAlias indexAlias : indexAliasesService) {
if (!indexMetaData.aliases().containsKey(indexAlias.alias())) {
// we have it in our aliases, but not in the metadata, remove it
indexAliasesService.remove(indexAlias.alias());
indexAliasesService.setAliases(indexMetaData.getAliases());
}
}
}
}
}
private void processAliases(String index, ObjectContainer<AliasMetaData> aliases, IndexAliasesService indexAliasesService) {
HashMap<String, IndexAlias> newAliases = newHashMap();
for (ObjectCursor<AliasMetaData> cursor : aliases) {
AliasMetaData aliasMd = cursor.value;
String alias = aliasMd.alias();
CompressedXContent filter = aliasMd.filter();
try {
if (!indexAliasesService.hasAlias(alias)) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] adding alias [{}], filter [{}]", index, alias, filter);
}
newAliases.put(alias, indexAliasesService.create(alias, filter));
} else {
if ((filter == null && indexAliasesService.alias(alias).filter() != null) ||
(filter != null && !filter.equals(indexAliasesService.alias(alias).filter()))) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] updating alias [{}], filter [{}]", index, alias, filter);
}
newAliases.put(alias, indexAliasesService.create(alias, filter));
}
}
} catch (Throwable e) {
logger.warn("[{}] failed to add alias [{}], filter [{}]", e, index, alias, filter);
}
}
indexAliasesService.addAll(newAliases);
}
private void applyNewOrUpdatedShards(final ClusterChangedEvent event) {
if (!indicesService.changesAllowed()) {

View File

@ -20,6 +20,7 @@
package org.elasticsearch.search;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchWrapperException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -28,7 +29,7 @@ import java.io.IOException;
/**
*
*/
public class SearchException extends ElasticsearchException {
public class SearchException extends ElasticsearchException implements ElasticsearchWrapperException {
private final SearchShardTarget shardTarget;

View File

@ -29,6 +29,7 @@ import org.apache.lucene.search.MultiCollector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TimeLimitingCollector;
import org.apache.lucene.search.Weight;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.MinimumScoreCollector;
@ -129,7 +130,7 @@ public class ContextIndexSearcher extends IndexSearcher implements Releasable {
return in.createNormalizedWeight(query, needsScores);
} catch (Throwable t) {
searchContext.clearReleasables(Lifetime.COLLECTION);
throw new RuntimeException(t);
throw ExceptionsHelper.convertToElastic(t);
}
}

View File

@ -155,11 +155,7 @@ public class DefaultSearchContext extends SearchContext {
this.fetchResult = new FetchSearchResult(id, shardTarget);
this.indexShard = indexShard;
this.indexService = indexService;
this.searcher = new ContextIndexSearcher(this, engineSearcher);
// initialize the filtering alias based on the provided filters
aliasFilter = indexService.aliasesService().aliasFilter(request.filteringAliases());
this.timeEstimateCounter = timeEstimateCounter;
}
@ -184,6 +180,9 @@ public class DefaultSearchContext extends SearchContext {
}
}
// initialize the filtering alias based on the provided filters
aliasFilter = indexService.aliasesService().aliasFilter(request.filteringAliases());
if (query() == null) {
parsedQuery(ParsedQuery.parsedMatchAllQuery());
}

View File

@ -29,6 +29,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.AliasAction;
import org.elasticsearch.cluster.metadata.AliasMetaData;
@ -960,18 +961,19 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
.addMapping("parent")
.addMapping("child", "_parent", "type=parent")
);
try {
client().prepareIndex("my-index", "parent", "1").setSource("{}").get();
client().prepareIndex("my-index", "child", "2").setSource("{}").setParent("1").get();
refresh();
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", hasChildQuery("child", matchAllQuery())));
} catch (IllegalArgumentException e) {
assertThat(e.getCause(), instanceOf(IllegalStateException.class));
assertThat(e.getCause().getMessage(), equalTo("Search context is required to be set"));
}
try {
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("child", matchAllQuery())));
} catch (IllegalArgumentException e) {
assertThat(e.getCause(), instanceOf(IllegalStateException.class));
assertThat(e.getCause().getMessage(), equalTo("Search context is required to be set"));
}
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("parent", matchAllQuery())));
SearchResponse response = client().prepareSearch("filter1").get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
response = client().prepareSearch("filter2").get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("2"));
}
@Test
@ -984,8 +986,18 @@ public class IndexAliasesTests extends ElasticsearchIntegrationTest {
.addMapping("parent")
.addMapping("child", "_parent", "type=parent")
);
client().prepareIndex("my-index", "parent", "1").setSource("{}").get();
client().prepareIndex("my-index", "child", "2").setSource("{}").setParent("1").get();
refresh();
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter1", hasChildQuery("child", matchAllQuery())));
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("child", matchAllQuery())));
assertAcked(admin().indices().prepareAliases().addAlias("my-index", "filter2", hasParentQuery("parent", matchAllQuery())));
SearchResponse response = client().prepareSearch("filter1").get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("1"));
response = client().prepareSearch("filter2").get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).id(), equalTo("2"));
}
@Test

View File

@ -21,12 +21,11 @@ package org.elasticsearch.index.aliases;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.indices.InvalidAliasNameException;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;

View File

@ -241,7 +241,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
NumericRangeQuery<Long> rangeQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true, null);
rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("10:00:00", "11:00:00", true, true, null).rewrite(null);
} finally {
SearchContext.removeCurrent();
}
@ -267,7 +267,7 @@ public class SimpleDateMappingTests extends ElasticsearchSingleNodeTest {
NumericRangeQuery<Long> rangeQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, null);
rangeQuery = (NumericRangeQuery<Long>) defaultMapper.mappers().smartNameFieldMapper("date_field").fieldType().rangeQuery("Jan 02 10:00:00", "Jan 02 11:00:00", true, true, null).rewrite(null);
} finally {
SearchContext.removeCurrent();
}

View File

@ -22,6 +22,7 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.inject.Injector;
@ -80,9 +81,10 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_filter_format_invalid.json");
try {
SearchContext.setCurrent(new TestSearchContext());
queryParser.parse(query).query();
// We need to rewrite, because range on date field initially returns LateParsingQuery
queryParser.parse(query).query().rewrite(null);
fail("A Range Filter with a specific format but with an unexpected date should raise a QueryParsingException");
} catch (QueryParsingException e) {
} catch (ElasticsearchParseException e) {
// We expect it
} finally {
SearchContext.removeCurrent();
@ -97,7 +99,8 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
Query parsedQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
parsedQuery = queryParser.parse(query).query();
// We need to rewrite, because range on date field initially returns LateParsingQuery
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();;
}
@ -115,9 +118,9 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_format_invalid.json");
try {
SearchContext.setCurrent(new TestSearchContext());
queryParser.parse(query).query();
queryParser.parse(query).query().rewrite(null);
fail("A Range Query with a specific format but with an unexpected date should raise a QueryParsingException");
} catch (QueryParsingException e) {
} catch (ElasticsearchParseException e) {
// We expect it
} finally {
SearchContext.removeCurrent();
@ -131,7 +134,8 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
Query parsedQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
parsedQuery = queryParser.parse(query).query();
// We need to rewrite, because range on date field initially returns LateParsingQuery
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();
}
@ -149,7 +153,8 @@ public class IndexQueryParserFilterDateRangeFormatTests extends ElasticsearchSin
query = copyToStringFromClasspath("/org/elasticsearch/index/query/date_range_query_boundaries_exclusive.json");
try {
SearchContext.setCurrent(new TestSearchContext());
parsedQuery = queryParser.parse(query).query();
// We need to rewrite, because range on date field initially returns LateParsingQuery
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();
}

View File

@ -99,7 +99,7 @@ public class IndexQueryParserFilterDateRangeTimezoneTests extends ElasticsearchS
Query parsedQuery;
try {
SearchContext.setCurrent(new TestSearchContext());
parsedQuery = queryParser.parse(query).query();
parsedQuery = queryParser.parse(query).query().rewrite(null);
} finally {
SearchContext.removeCurrent();
}

View File

@ -2052,5 +2052,17 @@ public class PercolatorTests extends ElasticsearchIntegrationTest {
assertThat(e.getCause().getMessage(), containsString("inner_hits unsupported"));
}
}
@Test
public void testParentChild() throws Exception {
// We don't fail p/c queries, but those queries are unusable, because only one document can be
// provided in the percolate api
assertAcked(prepareCreate("index").addMapping("child", "_parent", "type=parent").addMapping("parent"));
client().prepareIndex("index", PercolatorService.TYPE_NAME, "1")
.setSource(jsonBuilder().startObject().field("query", hasChildQuery("child", matchAllQuery())).endObject())
.execute().actionGet();
}
}

View File

@ -133,7 +133,7 @@ public class SimpleValidateQueryTests extends ElasticsearchIntegrationTest {
refresh();
ValidateQueryResponse response = client().admin().indices().prepareValidateQuery()
.setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).setExplain(true).get();
.setQuery(queryStringQuery("past:[now-2M/d TO now/d]")).setRewrite(true).get();
assertNoFailures(response);
assertThat(response.getQueryExplanation().size(), equalTo(1));

View File

@ -31,8 +31,6 @@ This means that a type can't become a parent type after is has been created.
The `parent.type` setting can't point to itself. This means self referential
parent/child isn't supported.
Parent/child queries (`has_child` & `has_parent`) can't be used in index aliases.
==== Global ordinals
Parent-child uses <<global-ordinals,global ordinals>> to speed up joins and global ordinals need to be rebuilt after any change to a shard.