Query: support negative queries, closes #44.
This commit is contained in:
parent ba0972cde5
commit e0c3bb5883
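
What "negative queries" means here: a Lucene BooleanQuery whose clauses are all prohibited (MUST_NOT) matches no documents on its own, because there is no positive clause to produce candidates. The query parsers touched below now detect that case and add a MatchAllDocsQuery MUST clause. A minimal sketch of the intended effect, assuming Lucene 3.x-era APIs; the class name and the tag:spam term are invented for illustration:

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.Query;
    import org.apache.lucene.search.TermQuery;
    import org.elasticsearch.index.query.support.QueryParsers;

    public class NegativeQueryExample {

        public static void main(String[] args) {
            // A boolean query holding only prohibited clauses matches nothing by itself.
            BooleanQuery onlyNots = new BooleanQuery();
            onlyNots.add(new TermQuery(new Term("tag", "spam")), BooleanClause.Occur.MUST_NOT);

            // The helper added in this commit clones the query and adds a MatchAllDocsQuery
            // MUST clause, so it behaves as "everything except tag:spam".
            Query fixed = QueryParsers.fixNegativeQueryIfNeeded(onlyNots);
            System.out.println(fixed);
        }
    }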
@@ -111,6 +111,17 @@ public class DocumentFieldMappers implements Iterable<FieldMapper> {
         return fullNameFieldMappers.get(fullName);
     }
 
+    /**
+     * Tries to find first based on {@link #fullName(String)}, then by {@link #indexName(String)}.
+     */
+    public FieldMappers smartName(String name) {
+        FieldMappers fieldMappers = fullName(name);
+        if (fieldMappers != null) {
+            return fieldMappers;
+        }
+        return indexName(name);
+    }
+
     /**
      * A smart analyzer used for indexing that takes into account specific analyzers configured
      * per {@link FieldMapper}.
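
A usage sketch for the new smartName(...) lookup; the helper class and the example field names are invented, and it assumes DocumentMapper.mappers() exposes the DocumentFieldMappers shown above (as MapperService does further down):

    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.FieldMappers;

    public class SmartNameLookupExample {

        // Resolves a user-supplied field name: smartName() tries the full JSON path first
        // (e.g. "name.first") and, if that is not mapped, falls back to the Lucene index
        // name (e.g. "first").
        public static FieldMappers resolve(DocumentMapper docMapper, String name) {
            return docMapper.mappers().smartName(name);
        }
    }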
@@ -20,11 +20,12 @@
 package org.elasticsearch.index.mapper;
 
 import org.apache.lucene.analysis.Analyzer;
+import org.apache.lucene.document.Fieldable;
 import org.elasticsearch.util.Nullable;
 import org.elasticsearch.util.concurrent.ThreadSafe;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 @ThreadSafe
 public interface DocumentMapper {

@@ -73,12 +74,20 @@ public interface DocumentMapper {
 
     /**
      * Parses the source into a parsed document.
-     * <p/>
+     *
      * <p>Validates that the source has the provided id and type. Note, most times
      * we will already have the id and the type even though they exist in the source as well.
      */
     ParsedDocument parse(@Nullable String type, @Nullable String id, byte[] source) throws MapperParsingException;
 
+    /**
+     * Parses the source into a parsed document.
+     *
+     * <p>Validates that the source has the provided id and type. Note, most times
+     * we will already have the id and the type even though they exist in the source as well.
+     */
+    ParsedDocument parse(@Nullable String type, @Nullable String id, byte[] source, @Nullable ParseListener listener) throws MapperParsingException;
+
     /**
      * Parses the source into the parsed document.
      */

@@ -120,4 +129,25 @@ public interface DocumentMapper {
             return this;
         }
     }
+
+    /**
+     * A listener to be called during the parse process.
+     */
+    public static interface ParseListener<ParseContext> {
+
+        public static final ParseListener EMPTY = new ParseListenerAdapter();
+
+        /**
+         * Called before a field is added to the document. Return <tt>true</tt> to include
+         * it in the document.
+         */
+        boolean beforeFieldAdded(FieldMapper fieldMapper, Fieldable fieldable, ParseContext parseContent);
+    }
+
+    public static class ParseListenerAdapter implements ParseListener {
+
+        @Override public boolean beforeFieldAdded(FieldMapper fieldMapper, Fieldable fieldable, Object parseContext) {
+            return true;
+        }
+    }
 }
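
A sketch of how a caller might use the new parse hook; the listener class and its skip-empty-values policy are invented for illustration, building only on the ParseListenerAdapter contract added above:

    import org.apache.lucene.document.Fieldable;
    import org.elasticsearch.index.mapper.DocumentMapper;
    import org.elasticsearch.index.mapper.FieldMapper;

    public class SkipEmptyFieldsListener extends DocumentMapper.ParseListenerAdapter {

        // Keep every field except those whose string value is present but empty.
        @Override public boolean beforeFieldAdded(FieldMapper fieldMapper, Fieldable fieldable, Object parseContext) {
            String value = fieldable.stringValue();
            return value == null || value.length() > 0;
        }
    }

Such a listener would be passed to the new overload, e.g. docMapper.parse(type, id, source, new SkipEmptyFieldsListener()).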
@@ -20,7 +20,7 @@
 package org.elasticsearch.index.mapper;
 
 /**
- * @author kimchy (Shay Banon)
+ * @author kimchy (shay.banon)
  */
 public interface FieldMapperListener {
 
@@ -249,11 +249,7 @@ public class MapperService extends AbstractIndexComponent implements Iterable<DocumentMapper> {
             DocumentMapper possibleDocMapper = mappers.get(possibleType);
             if (possibleDocMapper != null) {
                 String possibleName = smartName.substring(dotIndex + 1);
-                FieldMappers mappers = possibleDocMapper.mappers().fullName(possibleName);
-                if (mappers != null) {
-                    return mappers;
-                }
-                mappers = possibleDocMapper.mappers().indexName(possibleName);
+                FieldMappers mappers = possibleDocMapper.mappers().smartName(possibleName);
                 if (mappers != null) {
                     return mappers;
                 }

@@ -284,11 +280,7 @@ public class MapperService extends AbstractIndexComponent implements Iterable<DocumentMapper> {
             DocumentMapper possibleDocMapper = mappers.get(possibleType);
             if (possibleDocMapper != null) {
                 String possibleName = smartName.substring(dotIndex + 1);
-                FieldMappers mappers = possibleDocMapper.mappers().fullName(possibleName);
-                if (mappers != null) {
-                    return new SmartNameFieldMappers(mappers, possibleDocMapper);
-                }
-                mappers = possibleDocMapper.mappers().indexName(possibleName);
+                FieldMappers mappers = possibleDocMapper.mappers().smartName(possibleName);
                 if (mappers != null) {
                     return new SmartNameFieldMappers(mappers, possibleDocMapper);
                 }
@@ -251,7 +251,11 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
         return parse(null, null, source);
     }
 
-    @Override public ParsedDocument parse(String type, String id, byte[] source) {
+    @Override public ParsedDocument parse(@Nullable String type, @Nullable String id, byte[] source) throws MapperParsingException {
+        return parse(type, id, source, ParseListener.EMPTY);
+    }
+
+    @Override public ParsedDocument parse(String type, String id, byte[] source, ParseListener listener) {
         JsonParseContext jsonContext = cache.get();
 
         if (type != null && !type.equals(this.type)) {

@@ -262,7 +266,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
         JsonParser jp = null;
         try {
             jp = jsonFactory.createJsonParser(source);
-            jsonContext.reset(jp, new Document(), type, source);
+            jsonContext.reset(jp, new Document(), type, source, listener);
 
             // will result in JsonToken.START_OBJECT
             JsonToken token = jp.nextToken();
@@ -270,7 +270,9 @@ public abstract class JsonFieldMapper<T> implements FieldMapper<T>, JsonMapper {
         field.setOmitNorms(omitNorms);
         field.setOmitTermFreqAndPositions(omitTermFreqAndPositions);
         field.setBoost(boost);
-        jsonContext.doc().add(field);
+        if (jsonContext.listener().beforeFieldAdded(this, field, jsonContext)) {
+            jsonContext.doc().add(field);
+        }
     }
 
     protected abstract Field parseCreateField(JsonParseContext jsonContext) throws IOException;
@@ -21,6 +21,7 @@ package org.elasticsearch.index.mapper.json;
 
 import org.apache.lucene.document.Document;
 import org.codehaus.jackson.JsonParser;
+import org.elasticsearch.index.mapper.DocumentMapper;
 import org.elasticsearch.util.concurrent.NotThreadSafe;
 
 /**

@@ -43,6 +44,8 @@ public class JsonParseContext {
 
     private String id;
 
+    private DocumentMapper.ParseListener listener;
+
     private String uid;
 
     private StringBuilder stringBuilder = new StringBuilder();

@@ -56,7 +59,7 @@ public class JsonParseContext {
         this.path = path;
     }
 
-    public void reset(JsonParser jsonParser, Document document, String type, byte[] source) {
+    public void reset(JsonParser jsonParser, Document document, String type, byte[] source, DocumentMapper.ParseListener listener) {
         this.jsonParser = jsonParser;
         this.document = document;
         this.type = type;

@@ -64,6 +67,7 @@ public class JsonParseContext {
         this.path.reset();
         this.parsedIdState = ParsedIdState.NO;
         this.mappersAdded = false;
+        this.listener = listener;
     }
 
     public boolean mappersAdded() {

@@ -90,6 +94,10 @@ public class JsonParseContext {
         return this.jsonParser;
     }
 
+    public DocumentMapper.ParseListener listener() {
+        return this.listener;
+    }
+
     public Document doc() {
         return this.document;
     }
@@ -35,6 +35,7 @@ import java.io.IOException;
 import java.util.List;
 
 import static com.google.common.collect.Lists.*;
+import static org.elasticsearch.index.query.support.QueryParsers.*;
 
 /**
  * @author kimchy (Shay Banon)

@@ -110,6 +111,6 @@ public class BoolJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {
         if (minimumNumberShouldMatch != -1) {
             query.setMinimumNumberShouldMatch(minimumNumberShouldMatch);
         }
-        return query;
+        return fixNegativeQueryIfNeeded(query);
     }
 }
@@ -116,11 +116,11 @@ public abstract class JsonQueryBuilders {
         return new ConstantScoreQueryJsonQueryBuilder(filterBuilder);
     }
 
-    public static MoreLikeThisJsonQueryBuilder moreLikeThis(String... fields) {
+    public static MoreLikeThisJsonQueryBuilder moreLikeThisQuery(String... fields) {
         return new MoreLikeThisJsonQueryBuilder(fields);
     }
 
-    public static MoreLikeThisFieldJsonQueryBuilder moreLikeThisField(String name) {
+    public static MoreLikeThisFieldJsonQueryBuilder moreLikeThisFieldQuery(String name) {
         return new MoreLikeThisFieldJsonQueryBuilder(name);
     }
 
@@ -37,6 +37,8 @@ import org.elasticsearch.util.settings.Settings;
 
 import java.io.IOException;
 
+import static org.elasticsearch.index.query.support.QueryParsers.*;
+
 /**
  * @author kimchy (Shay Banon)
  */

@@ -154,7 +156,7 @@ public class QueryStringJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {
         try {
             Query query = queryParser.parse(queryString);
             query.setBoost(boost);
-            return query;
+            return fixNegativeQueryIfNeeded(query);
         } catch (ParseException e) {
             throw new QueryParsingException(index, "Failed to parse query [" + queryString + "]", e);
         }
@@ -26,6 +26,8 @@ import org.elasticsearch.index.mapper.MapperService;
 import org.elasticsearch.util.Nullable;
 import org.elasticsearch.util.lucene.search.TermFilter;
 
+import java.util.List;
+
 /**
  * @author kimchy (Shay Banon)
  */

@@ -35,6 +37,29 @@ public final class QueryParsers {
 
     }
 
+    public static boolean isNegativeQuery(Query q) {
+        if (!(q instanceof BooleanQuery)) {
+            return false;
+        }
+        List<BooleanClause> clauses = ((BooleanQuery) q).clauses();
+        if (clauses.isEmpty()) {
+            return false;
+        }
+        for (BooleanClause clause : clauses) {
+            if (!clause.isProhibited()) return false;
+        }
+        return true;
+    }
+
+    public static Query fixNegativeQueryIfNeeded(Query q) {
+        if (isNegativeQuery(q)) {
+            BooleanQuery newBq = (BooleanQuery) q.clone();
+            newBq.add(new MatchAllDocsQuery(), BooleanClause.Occur.MUST);
+            return newBq;
+        }
+        return q;
+    }
+
     public static Query wrapSmartNameQuery(Query query, @Nullable MapperService.SmartNameFieldMappers smartFieldMappers,
                                            @Nullable FilterCache filterCache) {
         if (smartFieldMappers == null) {
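
For reference, a short sketch of what isNegativeQuery(...) considers negative, using invented field and term values; only queries made up entirely of prohibited clauses are rewritten by fixNegativeQueryIfNeeded(...):

    import org.apache.lucene.index.Term;
    import org.apache.lucene.search.BooleanClause;
    import org.apache.lucene.search.BooleanQuery;
    import org.apache.lucene.search.TermQuery;

    import static org.elasticsearch.index.query.support.QueryParsers.*;

    public class IsNegativeQueryExample {

        public static void main(String[] args) {
            // All clauses prohibited -> negative; fixNegativeQueryIfNeeded() rewrites it.
            BooleanQuery negative = new BooleanQuery();
            negative.add(new TermQuery(new Term("status", "deleted")), BooleanClause.Occur.MUST_NOT);
            System.out.println(isNegativeQuery(negative)); // true

            // At least one positive clause -> left untouched by fixNegativeQueryIfNeeded().
            BooleanQuery mixed = new BooleanQuery();
            mixed.add(new TermQuery(new Term("user", "kimchy")), BooleanClause.Occur.MUST);
            mixed.add(new TermQuery(new Term("status", "deleted")), BooleanClause.Occur.MUST_NOT);
            System.out.println(isNegativeQuery(mixed)); // false
        }
    }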
@@ -603,7 +603,7 @@ public class SimpleJsonIndexQueryParserTests {
 
     @Test public void testMoreLikeThisBuilder() throws Exception {
         IndexQueryParser queryParser = newQueryParser();
-        Query parsedQuery = queryParser.parse(moreLikeThis("name.first", "name.last").likeText("something").minTermFrequency(1).maxQueryTerms(12));
+        Query parsedQuery = queryParser.parse(moreLikeThisQuery("name.first", "name.last").likeText("something").minTermFrequency(1).maxQueryTerms(12));
         assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class));
         MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery;
         assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first"));

@@ -627,7 +627,7 @@ public class SimpleJsonIndexQueryParserTests {
 
     @Test public void testMoreLikeThisFieldBuilder() throws Exception {
         IndexQueryParser queryParser = newQueryParser();
-        Query parsedQuery = queryParser.parse(moreLikeThisField("name.first").likeText("something").minTermFrequency(1).maxQueryTerms(12));
+        Query parsedQuery = queryParser.parse(moreLikeThisFieldQuery("name.first").likeText("something").minTermFrequency(1).maxQueryTerms(12));
         assertThat(parsedQuery, instanceOf(MoreLikeThisQuery.class));
         MoreLikeThisQuery mltQuery = (MoreLikeThisQuery) parsedQuery;
         assertThat(mltQuery.getMoreLikeFields()[0], equalTo("name.first"));