Several MapperService cleanups:

* Cleaned up MapperService#searchFilter(...) and moved it to DefaultSearchContext, since that class was the only user (see the sketch below this list). As part of the cleanup, percolator query documents are no longer excluded from the search response.
* Removed the resolveClosestNestedObjectMapper(...) method, as it was no longer used.
* Removed the DocumentTypeListener infrastructure. It was previously used by the percolator and parent/child, but those features no longer use it.
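
For reference, a rough sketch (not part of the diff) of the filter the new DefaultSearchContext helper builds when both types and an alias filter are present. It mirrors the new unit test further down and assumes the usual Lucene imports (TermsQuery, BooleanQuery, BooleanClause, BytesRef, MatchAllDocsQuery) plus TypeFieldMapper:

    // Illustration only: for types {"type1", "type2"} plus an alias filter, the new
    // createSearchFilter(...) produces a filter roughly equivalent to this.
    Query aliasFilter = new MatchAllDocsQuery();   // stand-in for a real alias filter
    Query typesFilter = new TermsQuery(TypeFieldMapper.NAME,
            new BytesRef("type1"), new BytesRef("type2"));
    Query searchFilter = new BooleanQuery.Builder()
            .add(typesFilter, BooleanClause.Occur.FILTER)  // restrict to the requested types
            .add(aliasFilter, BooleanClause.Occur.FILTER)  // apply the alias filter, if any
            .build();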

Closes #15924
Martijn van Groningen 2016-01-20 21:06:06 +01:00
parent bd1eefa5be
commit 7914d0f940
7 changed files with 96 additions and 216 deletions

Deleted file: DocumentTypeListener.java

@@ -1,33 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
/**
*/
public interface DocumentTypeListener {
/**
* Invoked just before a new document type has been created.
*
* @param mapper The new document mapper of the type being added
*/
void beforeCreate(DocumentMapper mapper);
}

Changed file: MapperService.java

@@ -125,8 +125,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
private final MapperAnalyzerWrapper searchAnalyzer;
private final MapperAnalyzerWrapper searchQuoteAnalyzer;
private final List<DocumentTypeListener> typeListeners = new CopyOnWriteArrayList<>();
private volatile Map<String, MappedFieldType> unmappedFieldTypes = emptyMap();
private volatile Set<String> parentTypes = emptySet();
@@ -212,14 +210,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return this.documentParser;
}
public void addTypeListener(DocumentTypeListener listener) {
typeListeners.add(listener);
}
public void removeTypeListener(DocumentTypeListener listener) {
typeListeners.remove(listener);
}
public DocumentMapper merge(String type, CompressedXContent mappingSource, MergeReason reason, boolean updateAllTypes) {
if (DEFAULT_MAPPING.equals(type)) {
// verify we can parse it
@@ -335,14 +325,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
this.fullPathObjectMappers = fullPathObjectMappers;
this.parentTypes = parentTypes;
// 5. send notifications about the change
if (oldMapper == null) {
// means the mapping was created
for (DocumentTypeListener typeListener : typeListeners) {
typeListener.beforeCreate(mapper);
}
}
assert assertSerialization(newMapper);
assert assertMappersShareSameFieldType();
@@ -481,105 +463,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return new DocumentMapperForType(mapper, mapper.mapping());
}
/**
* A filter for search. If a filter is required, will return it, otherwise, will return <tt>null</tt>.
*/
@Nullable
public Query searchFilter(String... types) {
boolean filterPercolateType = hasMapping(PercolatorService.TYPE_NAME);
if (types != null && filterPercolateType) {
for (String type : types) {
if (PercolatorService.TYPE_NAME.equals(type)) {
filterPercolateType = false;
break;
}
}
}
Query percolatorType = null;
if (filterPercolateType) {
percolatorType = documentMapper(PercolatorService.TYPE_NAME).typeFilter();
}
if (types == null || types.length == 0) {
if (hasNested && filterPercolateType) {
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(percolatorType, Occur.MUST_NOT);
bq.add(Queries.newNonNestedFilter(), Occur.MUST);
return new ConstantScoreQuery(bq.build());
} else if (hasNested) {
return Queries.newNonNestedFilter();
} else if (filterPercolateType) {
return new ConstantScoreQuery(Queries.not(percolatorType));
} else {
return null;
}
}
// if we filter by types, we don't need to filter by non nested docs
// since they have different types (starting with __)
if (types.length == 1) {
DocumentMapper docMapper = documentMapper(types[0]);
Query filter = docMapper != null ? docMapper.typeFilter() : new TermQuery(new Term(TypeFieldMapper.NAME, types[0]));
if (filterPercolateType) {
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(percolatorType, Occur.MUST_NOT);
bq.add(filter, Occur.MUST);
return new ConstantScoreQuery(bq.build());
} else {
return filter;
}
}
// see if we can use terms filter
boolean useTermsFilter = true;
for (String type : types) {
DocumentMapper docMapper = documentMapper(type);
if (docMapper == null) {
useTermsFilter = false;
break;
}
if (docMapper.typeMapper().fieldType().indexOptions() == IndexOptions.NONE) {
useTermsFilter = false;
break;
}
}
// We only use terms filter is there is a type filter, this means we don't need to check for hasNested here
if (useTermsFilter) {
BytesRef[] typesBytes = new BytesRef[types.length];
for (int i = 0; i < typesBytes.length; i++) {
typesBytes[i] = new BytesRef(types[i]);
}
TermsQuery termsFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes);
if (filterPercolateType) {
BooleanQuery.Builder bq = new BooleanQuery.Builder();
bq.add(percolatorType, Occur.MUST_NOT);
bq.add(termsFilter, Occur.MUST);
return new ConstantScoreQuery(bq.build());
} else {
return termsFilter;
}
} else {
BooleanQuery.Builder typesBool = new BooleanQuery.Builder();
for (String type : types) {
DocumentMapper docMapper = documentMapper(type);
if (docMapper == null) {
typesBool.add(new TermQuery(new Term(TypeFieldMapper.NAME, type)), BooleanClause.Occur.SHOULD);
} else {
typesBool.add(docMapper.typeFilter(), BooleanClause.Occur.SHOULD);
}
}
BooleanQuery.Builder bool = new BooleanQuery.Builder();
bool.add(typesBool.build(), Occur.MUST);
if (filterPercolateType) {
bool.add(percolatorType, BooleanClause.Occur.MUST_NOT);
}
if (hasNested) {
bool.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST);
}
return new ConstantScoreQuery(bool.build());
}
}
/**
* Returns the {@link MappedFieldType} for the give fullName.
*
@@ -642,33 +525,6 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
return this.searchQuoteAnalyzer;
}
/**
* Resolves the closest inherited {@link ObjectMapper} that is nested.
*/
public ObjectMapper resolveClosestNestedObjectMapper(String fieldName) {
int indexOf = fieldName.lastIndexOf('.');
if (indexOf == -1) {
return null;
} else {
do {
String objectPath = fieldName.substring(0, indexOf);
ObjectMapper objectMapper = fullPathObjectMappers.get(objectPath);
if (objectMapper == null) {
indexOf = objectPath.lastIndexOf('.');
continue;
}
if (objectMapper.nested().isNested()) {
return objectMapper;
}
indexOf = objectPath.lastIndexOf('.');
} while (indexOf != -1);
}
return null;
}
public Set<String> getParentTypes() {
return parentTypes;
}

Changed file: DefaultSearchContext.java

@@ -19,6 +19,7 @@
package org.elasticsearch.search.internal;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.BoostQuery;
@@ -26,6 +27,7 @@ import org.apache.lucene.search.Collector;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
@@ -45,6 +47,7 @@ import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.fielddata.IndexFieldDataService;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.ParsedQuery;
@@ -238,19 +241,34 @@ public class DefaultSearchContext extends SearchContext {
}
@Override
@Nullable
public Query searchFilter(String[] types) {
Query filter = mapperService().searchFilter(types);
if (filter == null && aliasFilter == null) {
return createSearchFilter(types, aliasFilter, mapperService().hasNested());
}
// extracted to static helper method to make writing unit tests easier:
static Query createSearchFilter(String[] types, Query aliasFilter, boolean hasNestedFields) {
Query typesFilter = null;
if (types != null && types.length >= 1) {
BytesRef[] typesBytes = new BytesRef[types.length];
for (int i = 0; i < typesBytes.length; i++) {
typesBytes[i] = new BytesRef(types[i]);
}
typesFilter = new TermsQuery(TypeFieldMapper.NAME, typesBytes);
} else if (hasNestedFields) {
typesFilter = Queries.newNonNestedFilter();
} else if (aliasFilter == null) {
return null;
}
BooleanQuery.Builder bq = new BooleanQuery.Builder();
if (filter != null) {
bq.add(filter, Occur.MUST);
if (typesFilter != null) {
bq.add(typesFilter, Occur.FILTER);
}
if (aliasFilter != null) {
bq.add(aliasFilter, Occur.MUST);
bq.add(aliasFilter, Occur.FILTER);
}
return new ConstantScoreQuery(bq.build());
return bq.build();
}
@Override

Changed file: MapperServiceTests.java

@@ -20,11 +20,13 @@
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.Term;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.lucene.search.Queries;
@@ -40,6 +42,7 @@ import java.util.HashSet;
import java.util.concurrent.ExecutionException;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
@@ -103,7 +106,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
fail();
} catch (Throwable t) {
if (t instanceof ExecutionException) {
t = ((ExecutionException) t).getCause();
t = t.getCause();
}
final Throwable throwable = ExceptionsHelper.unwrapCause(t);
if (throwable instanceof IllegalArgumentException) {
@@ -120,7 +123,7 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
fail();
} catch (Throwable t) {
if (t instanceof ExecutionException) {
t = ((ExecutionException) t).getCause();
t = t.getCause();
}
final Throwable throwable = ExceptionsHelper.unwrapCause(t);
if (throwable instanceof IllegalArgumentException) {
@@ -132,21 +135,4 @@ public class MapperServiceTests extends ESSingleNodeTestCase {
assertFalse(indexService.mapperService().hasMapping(MapperService.DEFAULT_MAPPING));
}
public void testSearchFilter() {
IndexService indexService = createIndex("index1", client().admin().indices().prepareCreate("index1")
.addMapping("type1", "field1", "type=nested")
.addMapping("type2", new Object[0])
);
Query searchFilter = indexService.mapperService().searchFilter("type1", "type3");
Query expectedQuery = new BooleanQuery.Builder()
.add(new BooleanQuery.Builder()
.add(new ConstantScoreQuery(new TermQuery(new Term(TypeFieldMapper.NAME, "type1"))), BooleanClause.Occur.SHOULD)
.add(new TermQuery(new Term(TypeFieldMapper.NAME, "type3")), BooleanClause.Occur.SHOULD)
.build(), BooleanClause.Occur.MUST
)
.add(Queries.newNonNestedFilter(), BooleanClause.Occur.MUST)
.build();
assertThat(searchFilter, equalTo(new ConstantScoreQuery(expectedQuery)));
}
}

Changed file: PercolatorIT.java

@@ -163,12 +163,6 @@ public class PercolatorIT extends ESIntegTestCase {
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4"));
logger.info("--> Search dummy doc, percolate queries must not be included");
SearchResponse searchResponse = client().prepareSearch("test", "test").execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type"));
assertThat(searchResponse.getHits().getAt(0).id(), equalTo("1"));
logger.info("--> Percolate non existing doc");
try {
client().preparePercolate()
@@ -657,14 +651,6 @@ public class PercolatorIT extends ESIntegTestCase {
assertMatchCount(response, 1l);
assertThat(response.getMatches(), arrayWithSize(1));
assertThat(convertFromTextArray(response.getMatches(), "test"), arrayContaining("4"));
logger.info("--> Search normals docs, percolate queries must not be included");
SearchResponse searchResponse = client().prepareSearch("test").execute().actionGet();
assertThat(searchResponse.getHits().totalHits(), equalTo(4L));
assertThat(searchResponse.getHits().getAt(0).type(), equalTo("type"));
assertThat(searchResponse.getHits().getAt(1).type(), equalTo("type"));
assertThat(searchResponse.getHits().getAt(2).type(), equalTo("type"));
assertThat(searchResponse.getHits().getAt(3).type(), equalTo("type"));
}
public void testPercolatingExistingDocs_routing() throws Exception {

New file: DefaultSearchContextTests.java

@@ -0,0 +1,63 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.internal;
import org.apache.lucene.queries.TermsQuery;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.test.ESTestCase;
import static org.apache.lucene.search.BooleanClause.Occur.FILTER;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.nullValue;
public class DefaultSearchContextTests extends ESTestCase {
public void testCreateSearchFilter() {
Query searchFilter = DefaultSearchContext.createSearchFilter(new String[]{"type1", "type2"}, null, randomBoolean());
Query expectedQuery = new BooleanQuery.Builder()
.add(new TermsQuery(TypeFieldMapper.NAME, new BytesRef("type1"), new BytesRef("type2")), FILTER)
.build();
assertThat(searchFilter, equalTo(expectedQuery));
searchFilter = DefaultSearchContext.createSearchFilter(new String[]{"type1", "type2"}, new MatchAllDocsQuery(), randomBoolean());
expectedQuery = new BooleanQuery.Builder()
.add(new TermsQuery(TypeFieldMapper.NAME, new BytesRef("type1"), new BytesRef("type2")), FILTER)
.add(new MatchAllDocsQuery(), FILTER)
.build();
assertThat(searchFilter, equalTo(expectedQuery));
searchFilter = DefaultSearchContext.createSearchFilter(null, null, false);
assertThat(searchFilter, nullValue());
searchFilter = DefaultSearchContext.createSearchFilter(null, null, true);
expectedQuery = new BooleanQuery.Builder().add(Queries.newNonNestedFilter(), FILTER).build();
assertThat(searchFilter, equalTo(expectedQuery));
searchFilter = DefaultSearchContext.createSearchFilter(null, new MatchAllDocsQuery(), randomBoolean());
expectedQuery = new BooleanQuery.Builder().add(new MatchAllDocsQuery(), FILTER).build();
assertThat(searchFilter, equalTo(expectedQuery));
}
}

Changed file: migration docs (breaking changes in 3.0)

@@ -15,6 +15,7 @@ your application to Elasticsearch 3.0.
* <<breaking_30_non_loopback>>
* <<breaking_30_thread_pool>>
* <<breaking_30_allocation>>
* <<breaking_30_percolator>>
[[breaking_30_search_changes]]
=== Warmers
@@ -625,6 +626,7 @@ in the case where shard copies can be found. Previously, a node not holding the
holding shard copies were satisfying the allocation deciders. Now, the shard will be assigned to a node having a shard copy,
even if none of the nodes holding a shard copy satisfy the allocation deciders.
[[breaking_30_percolator]]
=== Percolator
Adding percolator queries and modifications to existing percolator queries are no longer visible in immediately
@@ -640,3 +642,5 @@ The percolate api can no longer accept documents that have fields that don't exi
When percolating an existing document then specifying a document in the source of the percolate request is not allowed
any more.
Percolator documents are no longer excluded from the search response.
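
For example, a minimal, hypothetical Java-client snippet (not part of this change) shows one way to keep percolator queries out of the hits: restrict the search to the regular document types, so the internal ".percolator" type is never matched.

    import org.elasticsearch.action.search.SearchResponse;
    import org.elasticsearch.client.Client;
    import org.elasticsearch.index.query.QueryBuilders;

    public class SearchWithoutPercolatorDocs {
        // Percolator queries are stored as documents of the internal ".percolator" type
        // and are no longer filtered out automatically, so search only the types you want.
        public static SearchResponse search(Client client) {
            return client.prepareSearch("test")
                    .setTypes("type")                         // skips ".percolator" documents
                    .setQuery(QueryBuilders.matchAllQuery())
                    .get();
        }
    }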