Disabled parent/child queries in the delete by query API.

Parent/child query support in the delete by query API was not properly implemented and could leave a shard failed and unable to recover.

Closes #5828 #5916
Martijn van Groningen 2014-04-23 11:52:31 +07:00
parent 22cbdd930c
commit 17a5575757
10 changed files with 93 additions and 179 deletions
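
For context, a minimal sketch of what this change looks like from the client side (the index, type, and field names are reused from the tests below; the wrapper class and method are illustrative only, not part of this commit): a delete by query request that wraps a parent/child query now fails on every primary shard with a query parsing error, instead of risking an unrecoverable shard failure.

import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;
import org.elasticsearch.action.deletebyquery.IndexDeleteByQueryResponse;
import org.elasticsearch.client.Client;

import static org.elasticsearch.index.query.QueryBuilders.hasChildQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;

public class DeleteByQueryParentChildExample {

    public static void tryDeleteByHasChild(Client client) {
        // A delete-by-query request wrapping a parent/child query is now rejected per shard
        // instead of being executed against the engine.
        DeleteByQueryResponse response = client.prepareDeleteByQuery("test")
                .setQuery(hasChildQuery("child", termQuery("c_field", "blue")))
                .get();

        // All primary shards fail, no documents are deleted, and each failure reason contains
        // "[has_child] unsupported in delete_by_query api".
        IndexDeleteByQueryResponse index = response.getIndex("test");
        System.out.println("failed shards: " + index.getFailedShards());
    }
}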

TransportShardDeleteByQueryAction.java

@@ -52,6 +52,8 @@ import org.elasticsearch.transport.TransportService;
*/
public class TransportShardDeleteByQueryAction extends TransportShardReplicationOperationAction<ShardDeleteByQueryRequest, ShardDeleteByQueryRequest, ShardDeleteByQueryResponse> {
public final static String DELETE_BY_QUERY_API = "delete_by_query";
private final ScriptService scriptService;
private final CacheRecycler cacheRecycler;
private final PageCacheRecycler pageCacheRecycler;
@@ -116,7 +118,7 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId);
SearchContext.setCurrent(new DefaultSearchContext(0, new ShardSearchRequest().types(request.types()).nowInMillis(request.nowInMillis()), null,
indexShard.acquireSearcher("delete_by_query"), indexService, indexShard, scriptService, cacheRecycler,
indexShard.acquireSearcher(DELETE_BY_QUERY_API), indexService, indexShard, scriptService, cacheRecycler,
pageCacheRecycler, bigArrays));
try {
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.source(), request.filteringAliases(), Engine.Operation.Origin.PRIMARY, request.types());
@@ -138,7 +140,7 @@ public class TransportShardDeleteByQueryAction extends TransportShardReplication
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId);
SearchContext.setCurrent(new DefaultSearchContext(0, new ShardSearchRequest().types(request.types()).nowInMillis(request.nowInMillis()), null,
indexShard.acquireSearcher("delete_by_query", IndexShard.Mode.WRITE), indexService, indexShard, scriptService,
indexShard.acquireSearcher(DELETE_BY_QUERY_API, IndexShard.Mode.WRITE), indexService, indexShard, scriptService,
cacheRecycler, pageCacheRecycler, bigArrays));
try {
Engine.DeleteByQuery deleteByQuery = indexShard.prepareDeleteByQuery(request.source(), request.filteringAliases(), Engine.Operation.Origin.REPLICA, request.types());

HasChildFilterParser.java

@@ -24,18 +24,18 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.DeleteByQueryWrappingFilter;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import static org.elasticsearch.index.query.QueryParserUtils.ensureNotDeleteByQuery;
/**
*
*/
@@ -54,6 +54,7 @@ public class HasChildFilterParser implements FilterParser {
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
ensureNotDeleteByQuery(NAME, parseContext);
XContentParser parser = parseContext.parser();
boolean queryFound = false;
@@ -149,12 +150,7 @@ public class HasChildFilterParser implements FilterParser {
if (filterName != null) {
parseContext.addNamedFilter(filterName, new CustomQueryWrappingFilter(childrenConstantScoreQuery));
}
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
if (deleteByQuery) {
return new DeleteByQueryWrappingFilter(childrenConstantScoreQuery);
} else {
return new CustomQueryWrappingFilter(childrenConstantScoreQuery);
}
return new CustomQueryWrappingFilter(childrenConstantScoreQuery);
}
}

HasChildQueryParser.java

@@ -23,19 +23,22 @@ import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.search.child.*;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.ChildrenConstantScoreQuery;
import org.elasticsearch.index.search.child.ChildrenQuery;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import static org.elasticsearch.index.query.QueryParserUtils.ensureNotDeleteByQuery;
/**
*
*/
@@ -54,6 +57,7 @@ public class HasChildQueryParser implements QueryParser {
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
ensureNotDeleteByQuery(NAME, parseContext);
XContentParser parser = parseContext.parser();
boolean queryFound = false;
@@ -147,17 +151,13 @@ public class HasChildQueryParser implements QueryParser {
// wrap the query with type query
innerQuery = new XFilteredQuery(innerQuery, parseContext.cacheFilter(childDocMapper.typeFilter(), null));
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
Query query;
Filter parentFilter = parseContext.cacheFilter(parentDocMapper.typeFilter(), null);
ParentChildIndexFieldData parentChildIndexFieldData = parseContext.fieldData().getForField(parentFieldMapper);
if (!deleteByQuery && scoreType != null) {
if (scoreType != null) {
query = new ChildrenQuery(parentChildIndexFieldData, parentType, childType, parentFilter, innerQuery, scoreType, shortCircuitParentDocSet, nonNestedDocsFilter);
} else {
query = new ChildrenConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childType, parentFilter, shortCircuitParentDocSet, nonNestedDocsFilter);
if (deleteByQuery) {
query = new XConstantScoreQuery(new DeleteByQueryWrappingFilter(query));
}
}
if (queryName != null) {
parseContext.addNamedFilter(queryName, new CustomQueryWrappingFilter(query));

HasParentFilterParser.java

@@ -27,19 +27,19 @@ import org.elasticsearch.common.lucene.search.NotFilter;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.DeleteByQueryWrappingFilter;
import org.elasticsearch.index.search.child.ParentConstantScoreQuery;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import static org.elasticsearch.index.query.QueryParserUtils.ensureNotDeleteByQuery;
/**
*
*/
@@ -58,6 +58,7 @@ public class HasParentFilterParser implements FilterParser {
@Override
public Filter parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
ensureNotDeleteByQuery(NAME, parseContext);
XContentParser parser = parseContext.parser();
boolean queryFound = false;
@@ -165,13 +166,7 @@ public class HasParentFilterParser implements FilterParser {
if (filterName != null) {
parseContext.addNamedFilter(filterName, new CustomQueryWrappingFilter(parentConstantScoreQuery));
}
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
if (deleteByQuery) {
return new DeleteByQueryWrappingFilter(parentConstantScoreQuery);
} else {
return new CustomQueryWrappingFilter(parentConstantScoreQuery);
}
return new CustomQueryWrappingFilter(parentConstantScoreQuery);
}
}

HasParentQueryParser.java

@@ -25,23 +25,22 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.NotFilter;
import org.elasticsearch.common.lucene.search.XBooleanFilter;
import org.elasticsearch.common.lucene.search.XConstantScoreQuery;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.DeleteByQueryWrappingFilter;
import org.elasticsearch.index.search.child.ParentConstantScoreQuery;
import org.elasticsearch.index.search.child.ParentQuery;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import static org.elasticsearch.index.query.QueryParserUtils.ensureNotDeleteByQuery;
public class HasParentQueryParser implements QueryParser {
public static final String NAME = "has_parent";
@@ -57,6 +56,7 @@ public class HasParentQueryParser implements QueryParser {
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
ensureNotDeleteByQuery(NAME, parseContext);
XContentParser parser = parseContext.parser();
boolean queryFound = false;
@@ -165,15 +165,11 @@ public class HasParentQueryParser implements QueryParser {
}
Filter childrenFilter = parseContext.cacheFilter(new NotFilter(parentFilter), null);
boolean deleteByQuery = "delete_by_query".equals(SearchContext.current().source());
Query query;
if (!deleteByQuery && score) {
if (score) {
query = new ParentQuery(parentChildIndexFieldData, innerQuery, parentType, childrenFilter);
} else {
query = new ParentConstantScoreQuery(parentChildIndexFieldData, innerQuery, parentType, childrenFilter);
if (deleteByQuery) {
query = new XConstantScoreQuery(new DeleteByQueryWrappingFilter(query));
}
}
query.setBoost(boost);
if (queryName != null) {

QueryParserUtils.java (new file)

@@ -0,0 +1,41 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query;
import org.elasticsearch.action.deletebyquery.TransportShardDeleteByQueryAction;
import org.elasticsearch.search.internal.SearchContext;
/**
*/
public final class QueryParserUtils {
private QueryParserUtils() {
}
/**
* Ensures that the query parsing wasn't invoked via the delete by query api.
*/
public static void ensureNotDeleteByQuery(String name, QueryParseContext parseContext) {
if (TransportShardDeleteByQueryAction.DELETE_BY_QUERY_API.equals(SearchContext.current().source())) {
throw new QueryParsingException(parseContext.index(), "[" + name + "] unsupported in delete_by_query api");
}
}
}

TopChildrenQueryParser.java

@@ -24,18 +24,19 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.XFilteredQuery;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.query.support.XContentStructure;
import org.elasticsearch.index.search.child.CustomQueryWrappingFilter;
import org.elasticsearch.index.search.child.ScoreType;
import org.elasticsearch.index.search.child.TopChildrenQuery;
import org.elasticsearch.index.search.nested.NonNestedDocsFilter;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import static org.elasticsearch.index.query.QueryParserUtils.ensureNotDeleteByQuery;
/**
*
*/
@@ -54,6 +55,7 @@ public class TopChildrenQueryParser implements QueryParser {
@Override
public Query parse(QueryParseContext parseContext) throws IOException, QueryParsingException {
ensureNotDeleteByQuery(NAME, parseContext);
XContentParser parser = parseContext.parser();
boolean queryFound = false;
@@ -116,10 +118,6 @@ public class TopChildrenQueryParser implements QueryParser {
return null;
}
if ("delete_by_query".equals(SearchContext.current().source())) {
throw new QueryParsingException(parseContext.index(), "[top_children] unsupported in delete_by_query api");
}
DocumentMapper childDocMapper = parseContext.mapperService().documentMapper(childType);
if (childDocMapper == null) {
throw new QueryParsingException(parseContext.index(), "No mapping for for type [" + childType + "]");

DeleteByQueryWrappingFilter.java (deleted)

@@ -1,126 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.search.child;
import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.MultiReader;
import org.apache.lucene.search.*;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.Bits;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
/**
* This filters just exist for wrapping parent child queries in the delete by query api.
* Don't use this filter for other purposes.
*
* @elasticsearch.internal
*/
public class DeleteByQueryWrappingFilter extends Filter {
private final Query query;
private IndexSearcher searcher;
private Weight weight;
/** Constructs a filter which only matches documents matching
* <code>query</code>.
*/
public DeleteByQueryWrappingFilter(Query query) {
if (query == null)
throw new NullPointerException("Query may not be null");
this.query = query;
}
/** returns the inner Query */
public final Query getQuery() {
return query;
}
@Override
public DocIdSet getDocIdSet(final AtomicReaderContext context, final Bits acceptDocs) throws IOException {
SearchContext searchContext = SearchContext.current();
if (weight == null) {
assert searcher == null;
searcher = searchContext.searcher();
IndexReader indexReader = SearchContext.current().searcher().getIndexReader();
IndexReader multiReader = null;
try {
if (!contains(indexReader, context)) {
multiReader = new MultiReader(new IndexReader[]{indexReader, context.reader()}, false);
Similarity similarity = searcher.getSimilarity();
searcher = new IndexSearcher(new MultiReader(indexReader, context.reader()));
searcher.setSimilarity(similarity);
}
weight = searcher.createNormalizedWeight(query);
} finally {
if (multiReader != null) {
multiReader.close();
}
}
} else {
IndexReader indexReader = searcher.getIndexReader();
if (!contains(indexReader, context)) {
try (IndexReader multiReader = new MultiReader(new IndexReader[]{indexReader, context.reader()}, false)) {
Similarity similarity = searcher.getSimilarity();
searcher = new IndexSearcher(multiReader);
searcher.setSimilarity(similarity);
weight = searcher.createNormalizedWeight(query);
}
}
}
return new DocIdSet() {
@Override
public DocIdSetIterator iterator() throws IOException {
return weight.scorer(context, acceptDocs);
}
@Override
public boolean isCacheable() { return false; }
};
}
@Override
public String toString() {
return "DeleteByQueryWrappingFilter(" + query + ")";
}
@Override
public boolean equals(Object o) {
if (!(o instanceof DeleteByQueryWrappingFilter))
return false;
return this.query.equals(((DeleteByQueryWrappingFilter)o).query);
}
@Override
public int hashCode() {
return query.hashCode() ^ 0x823D64CA;
}
static boolean contains(IndexReader indexReader, AtomicReaderContext context) {
for (AtomicReaderContext atomicReaderContext : indexReader.leaves()) {
if (context.reader().getCoreCacheKey().equals(atomicReaderContext.reader().getCoreCacheKey())) {
return true;
}
}
return false;
}
}

DeleteByQueryTests.java

@@ -116,7 +116,7 @@ public class DeleteByQueryTests extends ElasticsearchIntegrationTest {
assertThat(response.getIndices().get("twitter").getFailedShards(), equalTo(twitter.numPrimaries));
assertThat(response.getIndices().get("twitter").getFailures().length, equalTo(twitter.numPrimaries));
for (ShardOperationFailedException failure : response.getIndices().get("twitter").getFailures()) {
assertThat(failure.reason(), containsString("[twitter] [has_child] No mapping for for type [type]"));
assertThat(failure.reason(), containsString("[twitter] [has_child] unsupported in delete_by_query api"));
assertThat(failure.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(failure.shardId(), greaterThan(-1));
}

SimpleChildQuerySearchTests.java

@@ -24,6 +24,7 @@ import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
import org.elasticsearch.action.count.CountResponse;
import org.elasticsearch.action.deletebyquery.DeleteByQueryResponse;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
@@ -1244,13 +1245,18 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
.get();
assertHitCount(searchResponse, 2l);
client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).get();
// Delete by query doesn't support p/c queries. If the delete by query has a different execution mode
// that doesn't rely on IW#deleteByQuery() then this test can be changed.
DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).get();
assertThat(deleteByQueryResponse.getIndex("test").getSuccessfulShards(), equalTo(0));
assertThat(deleteByQueryResponse.getIndex("test").getFailedShards(), equalTo(getNumShards("test").numPrimaries));
assertThat(deleteByQueryResponse.getIndex("test").getFailures()[0].reason(), containsString("[has_child] unsupported in delete_by_query api"));
client().admin().indices().prepareRefresh("test").get();
searchResponse = client().prepareSearch("test")
.setQuery(randomHasChild("child", "c_field", "blue"))
.get();
assertHitCount(searchResponse, 0l);
assertHitCount(searchResponse, 3l);
}
@Test
@@ -1286,13 +1292,16 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
.get();
assertHitCount(searchResponse, 3l);
client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).get();
DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test").setQuery(randomHasChild("child", "c_field", "blue")).get();
assertThat(deleteByQueryResponse.getIndex("test").getSuccessfulShards(), equalTo(0));
assertThat(deleteByQueryResponse.getIndex("test").getFailedShards(), equalTo(getNumShards("test").numPrimaries));
assertThat(deleteByQueryResponse.getIndex("test").getFailures()[0].reason(), containsString("[has_child] unsupported in delete_by_query api"));
client().admin().indices().prepareRefresh("test").get();
searchResponse = client().prepareSearch("test")
.setQuery(randomHasChild("child", "c_field", "blue"))
.get();
assertHitCount(searchResponse, 0l);
assertHitCount(searchResponse, 3l);
}
private QueryBuilder randomHasChild(String type, String field, String value) {
@@ -1334,9 +1343,12 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
.get();
assertHitCount(searchResponse, 2l);
client().prepareDeleteByQuery("test")
DeleteByQueryResponse deleteByQueryResponse = client().prepareDeleteByQuery("test")
.setQuery(randomHasParent("parent", "p_field", "p_value2"))
.get();
assertThat(deleteByQueryResponse.getIndex("test").getSuccessfulShards(), equalTo(0));
assertThat(deleteByQueryResponse.getIndex("test").getFailedShards(), equalTo(getNumShards("test").numPrimaries));
assertThat(deleteByQueryResponse.getIndex("test").getFailures()[0].reason(), containsString("[has_parent] unsupported in delete_by_query api"));
client().admin().indices().prepareRefresh("test").get();
client().admin().indices().prepareRefresh("test").get();
client().admin().indices().prepareRefresh("test").get();
@@ -1344,7 +1356,7 @@ public class SimpleChildQuerySearchTests extends ElasticsearchIntegrationTest {
searchResponse = client().prepareSearch("test")
.setQuery(randomHasParent("parent", "p_field", "p_value2"))
.get();
assertHitCount(searchResponse, 0l);
assertHitCount(searchResponse, 2l);
}
private QueryBuilder randomHasParent(String type, String field, String value) {