mirror of
https://github.com/honeymoose/OpenSearch.git
synced 2025-02-17 02:14:54 +00:00
_all field, closes #63.
This commit is contained in:
parent
1d2d4674cf
commit
1dd5997889
@ -20,20 +20,19 @@
|
||||
package org.elasticsearch.action.terms;
|
||||
|
||||
import org.elasticsearch.ElasticSearchIllegalArgumentException;
|
||||
import org.elasticsearch.action.ActionRequestValidationException;
|
||||
import org.elasticsearch.action.support.broadcast.BroadcastOperationRequest;
|
||||
import org.elasticsearch.util.Required;
|
||||
import org.elasticsearch.index.mapper.AllFieldMapper;
|
||||
|
||||
import java.io.DataInput;
|
||||
import java.io.DataOutput;
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.action.Actions.*;
|
||||
|
||||
/**
|
||||
* Terms request represent a request to get terms in one or more indices of specific fields and their
|
||||
* document frequencies (in how many document each term exists).
|
||||
*
|
||||
* <p>By default, the "_all" field will be used to extract terms and frequencies.
|
||||
*
|
||||
* <p>This is very handy to implement things like tag clouds and auto complete (using {@link #prefix(String)} or
|
||||
* {@link #regexp(String)}).
|
||||
*
|
||||
@ -103,7 +102,9 @@ public class TermsRequest extends BroadcastOperationRequest {
|
||||
}
|
||||
}
|
||||
|
||||
private String[] fields;
|
||||
private static final String[] DEFAULT_FIELDS = new String[]{AllFieldMapper.NAME};
|
||||
|
||||
private String[] fields = DEFAULT_FIELDS;
|
||||
|
||||
private String from;
|
||||
|
||||
@ -134,20 +135,12 @@ public class TermsRequest extends BroadcastOperationRequest {
|
||||
|
||||
/**
|
||||
* Constructs a new terms requests with the provided indices. Don't pass anything for it to run
|
||||
* over all the indices. Note, the {@link #fields(String...)} is required.
|
||||
* over all the indices.
|
||||
*/
|
||||
public TermsRequest(String... indices) {
|
||||
super(indices, null);
|
||||
}
|
||||
|
||||
@Override public ActionRequestValidationException validate() {
|
||||
ActionRequestValidationException validationException = super.validate();
|
||||
if (fields == null || fields.length == 0) {
|
||||
validationException = addValidationError("fields is missing", validationException);
|
||||
}
|
||||
return validationException;
|
||||
}
|
||||
|
||||
/**
|
||||
* The fields within each document which terms will be iterated over and returned with the
|
||||
* document frequencies.
|
||||
@ -158,9 +151,9 @@ public class TermsRequest extends BroadcastOperationRequest {
|
||||
|
||||
/**
|
||||
* The fields within each document which terms will be iterated over and returned with the
|
||||
* document frequencies.
|
||||
* document frequencies. By default will use the "_all" field.
|
||||
*/
|
||||
@Required public TermsRequest fields(String... fields) {
|
||||
public TermsRequest fields(String... fields) {
|
||||
this.fields = fields;
|
||||
return this;
|
||||
}
|
||||
|
@ -0,0 +1,30 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper;
|
||||
|
||||
import org.apache.lucene.util.StringHelper;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public interface AllFieldMapper extends FieldMapper<Void>, InternalMapper {
|
||||
|
||||
public static final String NAME = StringHelper.intern("_all");
|
||||
}
|
@ -53,6 +53,8 @@ public interface DocumentMapper {
|
||||
|
||||
BoostFieldMapper boostMapper();
|
||||
|
||||
AllFieldMapper allFieldMapper();
|
||||
|
||||
DocumentFieldMappers mappers();
|
||||
|
||||
/**
|
||||
|
@ -0,0 +1,154 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.json;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.Fieldable;
|
||||
import org.elasticsearch.index.analysis.NamedAnalyzer;
|
||||
import org.elasticsearch.index.mapper.AllFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
import org.elasticsearch.util.json.JsonBuilder;
|
||||
import org.elasticsearch.util.lucene.Lucene;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.util.lucene.all.AllTokenFilter.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonAllFieldMapper extends JsonFieldMapper<Void> implements AllFieldMapper {
|
||||
|
||||
public static final String JSON_TYPE = "allField";
|
||||
|
||||
public static class Defaults extends JsonFieldMapper.Defaults {
|
||||
public static final String NAME = AllFieldMapper.NAME;
|
||||
public static final String INDEX_NAME = AllFieldMapper.NAME;
|
||||
public static final boolean ENABLED = true;
|
||||
}
|
||||
|
||||
|
||||
public static class Builder extends JsonFieldMapper.Builder<Builder, JsonAllFieldMapper> {
|
||||
|
||||
private boolean enabled = Defaults.ENABLED;
|
||||
|
||||
public Builder() {
|
||||
super(Defaults.NAME);
|
||||
builder = this;
|
||||
indexName = Defaults.INDEX_NAME;
|
||||
}
|
||||
|
||||
public Builder enabled(boolean enabled) {
|
||||
this.enabled = enabled;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override public Builder termVector(Field.TermVector termVector) {
|
||||
return super.termVector(termVector);
|
||||
}
|
||||
|
||||
@Override protected Builder indexAnalyzer(NamedAnalyzer indexAnalyzer) {
|
||||
return super.indexAnalyzer(indexAnalyzer);
|
||||
}
|
||||
|
||||
@Override protected Builder searchAnalyzer(NamedAnalyzer searchAnalyzer) {
|
||||
return super.searchAnalyzer(searchAnalyzer);
|
||||
}
|
||||
|
||||
@Override public JsonAllFieldMapper build(BuilderContext context) {
|
||||
return new JsonAllFieldMapper(name, termVector, omitNorms, omitTermFreqAndPositions,
|
||||
indexAnalyzer, searchAnalyzer, enabled);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private boolean enabled;
|
||||
|
||||
public JsonAllFieldMapper() {
|
||||
this(Defaults.NAME, Defaults.TERM_VECTOR, Defaults.OMIT_NORMS, Defaults.OMIT_TERM_FREQ_AND_POSITIONS, null, null, Defaults.ENABLED);
|
||||
}
|
||||
|
||||
protected JsonAllFieldMapper(String name, Field.TermVector termVector, boolean omitNorms, boolean omitTermFreqAndPositions,
|
||||
NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer, boolean enabled) {
|
||||
super(new Names(name, name, name, name), Field.Index.ANALYZED, Field.Store.NO, termVector, 1.0f, omitNorms, omitTermFreqAndPositions,
|
||||
indexAnalyzer, searchAnalyzer);
|
||||
this.enabled = enabled;
|
||||
}
|
||||
|
||||
public boolean enabled() {
|
||||
return this.enabled;
|
||||
}
|
||||
|
||||
@Override protected Field parseCreateField(JsonParseContext jsonContext) throws IOException {
|
||||
if (!enabled) {
|
||||
return null;
|
||||
}
|
||||
Analyzer analyzer = indexAnalyzer();
|
||||
if (analyzer == null) {
|
||||
analyzer = jsonContext.docMapper().indexAnalyzer();
|
||||
if (analyzer == null) {
|
||||
analyzer = Lucene.STANDARD_ANALYZER;
|
||||
}
|
||||
}
|
||||
return new Field(names.indexName(), allTokenStream(names.indexName(), jsonContext.allEntries().finishTexts(), analyzer), termVector);
|
||||
}
|
||||
|
||||
@Override public Void value(Fieldable field) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override public String valueAsString(Fieldable field) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override public Object valueForSearch(Fieldable field) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override public String indexedValue(String value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override public String indexedValue(Void value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override protected String jsonType() {
|
||||
return JSON_TYPE;
|
||||
}
|
||||
|
||||
@Override public void toJson(JsonBuilder builder, Params params) throws IOException {
|
||||
builder.startObject(JSON_TYPE);
|
||||
builder.field("enabled", enabled);
|
||||
builder.field("termVector", termVector.name().toLowerCase());
|
||||
if (indexAnalyzer != null && !indexAnalyzer.name().startsWith("_")) {
|
||||
builder.field("indexAnalyzer", indexAnalyzer.name());
|
||||
}
|
||||
if (searchAnalyzer != null && !searchAnalyzer.name().startsWith("_")) {
|
||||
builder.field("searchAnalyzer", searchAnalyzer.name());
|
||||
}
|
||||
builder.endObject();
|
||||
}
|
||||
|
||||
@Override public void merge(JsonMapper mergeWith, JsonMergeContext mergeContext) throws MergeMappingException {
|
||||
// do nothing here, no merging, but also no exception
|
||||
}
|
||||
}
|
@ -34,7 +34,7 @@ import org.elasticsearch.util.json.JsonBuilder;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonDateFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
|
||||
@ -68,8 +68,10 @@ public class JsonDateFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
}
|
||||
|
||||
@Override public JsonDateFieldMapper build(BuilderContext context) {
|
||||
return new JsonDateFieldMapper(buildNames(context), dateTimeFormatter,
|
||||
JsonDateFieldMapper fieldMapper = new JsonDateFieldMapper(buildNames(context), dateTimeFormatter,
|
||||
precisionStep, index, store, boost, omitNorms, omitTermFreqAndPositions, nullValue);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
}
|
||||
}
|
||||
|
||||
@ -137,6 +139,9 @@ public class JsonDateFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
if (dateAsString == null) {
|
||||
return null;
|
||||
}
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), dateAsString, boost);
|
||||
}
|
||||
long value = dateTimeFormatter.parser().parseMillis(dateAsString);
|
||||
Field field = null;
|
||||
if (stored()) {
|
||||
@ -164,5 +169,8 @@ public class JsonDateFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
if (nullValue != null) {
|
||||
builder.field("nullValue", nullValue);
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("includeInAll", includeInAll);
|
||||
}
|
||||
}
|
||||
}
|
@ -40,7 +40,7 @@ import static com.google.common.collect.Lists.*;
|
||||
import static org.elasticsearch.util.json.JsonBuilder.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
|
||||
@ -56,6 +56,8 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
|
||||
private JsonBoostFieldMapper boostFieldMapper = new JsonBoostFieldMapper();
|
||||
|
||||
private JsonAllFieldMapper allFieldMapper = new JsonAllFieldMapper();
|
||||
|
||||
private Analyzer indexAnalyzer;
|
||||
|
||||
private Analyzer searchAnalyzer;
|
||||
@ -95,6 +97,11 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder allField(JsonAllFieldMapper.Builder builder) {
|
||||
this.allFieldMapper = builder.build(builderContext);
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder mappingSource(String mappingSource) {
|
||||
this.mappingSource = mappingSource;
|
||||
return this;
|
||||
@ -121,7 +128,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
public JsonDocumentMapper build() {
|
||||
Preconditions.checkNotNull(rootObjectMapper, "Json mapper builder must have the root object mapper set");
|
||||
return new JsonDocumentMapper(rootObjectMapper, uidFieldMapper, idFieldMapper, typeFieldMapper,
|
||||
sourceFieldMapper, indexAnalyzer, searchAnalyzer, boostFieldMapper, mappingSource);
|
||||
sourceFieldMapper, allFieldMapper, indexAnalyzer, searchAnalyzer, boostFieldMapper, mappingSource);
|
||||
}
|
||||
}
|
||||
|
||||
@ -148,6 +155,8 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
|
||||
private final JsonBoostFieldMapper boostFieldMapper;
|
||||
|
||||
private final JsonAllFieldMapper allFieldMapper;
|
||||
|
||||
private final JsonObjectMapper rootObjectMapper;
|
||||
|
||||
private final Analyzer indexAnalyzer;
|
||||
@ -165,6 +174,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
JsonIdFieldMapper idFieldMapper,
|
||||
JsonTypeFieldMapper typeFieldMapper,
|
||||
JsonSourceFieldMapper sourceFieldMapper,
|
||||
JsonAllFieldMapper allFieldMapper,
|
||||
Analyzer indexAnalyzer, Analyzer searchAnalyzer,
|
||||
@Nullable JsonBoostFieldMapper boostFieldMapper,
|
||||
@Nullable String mappingSource) {
|
||||
@ -175,11 +185,17 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
this.idFieldMapper = idFieldMapper;
|
||||
this.typeFieldMapper = typeFieldMapper;
|
||||
this.sourceFieldMapper = sourceFieldMapper;
|
||||
this.allFieldMapper = allFieldMapper;
|
||||
this.boostFieldMapper = boostFieldMapper;
|
||||
|
||||
this.indexAnalyzer = indexAnalyzer;
|
||||
this.searchAnalyzer = searchAnalyzer;
|
||||
|
||||
// if we are not enabling all, set it to false on the root object, (and on all the rest...)
|
||||
if (!allFieldMapper.enabled()) {
|
||||
this.rootObjectMapper.includeInAll(allFieldMapper.enabled());
|
||||
}
|
||||
|
||||
rootObjectMapper.putMapper(idFieldMapper);
|
||||
if (boostFieldMapper != null) {
|
||||
rootObjectMapper.putMapper(boostFieldMapper);
|
||||
@ -235,6 +251,10 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
return this.boostFieldMapper;
|
||||
}
|
||||
|
||||
@Override public AllFieldMapper allFieldMapper() {
|
||||
return this.allFieldMapper;
|
||||
}
|
||||
|
||||
@Override public Analyzer indexAnalyzer() {
|
||||
return this.indexAnalyzer;
|
||||
}
|
||||
@ -311,6 +331,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
jsonContext.parsedId(JsonParseContext.ParsedIdState.EXTERNAL);
|
||||
idFieldMapper.parse(jsonContext);
|
||||
}
|
||||
allFieldMapper.parse(jsonContext);
|
||||
} catch (IOException e) {
|
||||
throw new MapperParsingException("Failed to parse", e);
|
||||
} finally {
|
||||
@ -342,6 +363,7 @@ public class JsonDocumentMapper implements DocumentMapper, ToJson {
|
||||
fieldMapperListener.fieldMapper(typeFieldMapper);
|
||||
fieldMapperListener.fieldMapper(idFieldMapper);
|
||||
fieldMapperListener.fieldMapper(uidFieldMapper);
|
||||
fieldMapperListener.fieldMapper(allFieldMapper);
|
||||
rootObjectMapper.traverse(fieldMapperListener);
|
||||
}
|
||||
}
|
||||
|
@ -103,6 +103,8 @@ public class JsonDocumentMapperParser implements DocumentMapperParser {
|
||||
docBuilder.uidField(parseUidField((ObjectNode) fieldNode));
|
||||
} else if (JsonBoostFieldMapper.JSON_TYPE.equals(fieldName)) {
|
||||
docBuilder.boostField(parseBoostField((ObjectNode) fieldNode));
|
||||
} else if (JsonAllFieldMapper.JSON_TYPE.equals(fieldName)) {
|
||||
docBuilder.allField(parseAllField((ObjectNode) fieldNode));
|
||||
} else if ("indexAnalyzer".equals(fieldName)) {
|
||||
docBuilder.indexAnalyzer(analysisService.analyzer(fieldNode.getTextValue()));
|
||||
} else if ("searchAnalyzer".equals(fieldName)) {
|
||||
@ -173,6 +175,21 @@ public class JsonDocumentMapperParser implements DocumentMapperParser {
|
||||
return builder;
|
||||
}
|
||||
|
||||
private JsonAllFieldMapper.Builder parseAllField(ObjectNode allNode) {
|
||||
// String name = idNode.get("name") == null ? JsonIdFieldMapper.Defaults.NAME : idNode.get("name").getTextValue();
|
||||
JsonAllFieldMapper.Builder builder = all();
|
||||
parseJsonField(builder, builder.name, allNode);
|
||||
for (Iterator<Map.Entry<String, JsonNode>> fieldsIt = allNode.getFields(); fieldsIt.hasNext();) {
|
||||
Map.Entry<String, JsonNode> entry = fieldsIt.next();
|
||||
String fieldName = entry.getKey();
|
||||
JsonNode fieldNode = entry.getValue();
|
||||
if (fieldName.equals("enabled")) {
|
||||
builder.enabled(nodeBooleanValue(fieldNode));
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
}
|
||||
|
||||
private JsonSourceFieldMapper.Builder parseSourceField(ObjectNode sourceNode) {
|
||||
// String name = sourceNode.get("name") == null ? JsonSourceFieldMapper.Defaults.NAME : sourceNode.get("name").getTextValue();
|
||||
JsonSourceFieldMapper.Builder builder = source();
|
||||
@ -233,6 +250,8 @@ public class JsonDocumentMapperParser implements DocumentMapperParser {
|
||||
builder.pathType(parsePathType(name, fieldNode.getValueAsText()));
|
||||
} else if (fieldName.equals("properties")) {
|
||||
parseProperties(builder, (ObjectNode) fieldNode);
|
||||
} else if (fieldName.equals("includeInAll")) {
|
||||
builder.includeInAll(nodeBooleanValue(fieldNode));
|
||||
}
|
||||
}
|
||||
return builder;
|
||||
@ -483,6 +502,8 @@ public class JsonDocumentMapperParser implements DocumentMapperParser {
|
||||
} else if (propName.equals("analyzer")) {
|
||||
builder.indexAnalyzer(analysisService.analyzer(propNode.getTextValue()));
|
||||
builder.searchAnalyzer(analysisService.analyzer(propNode.getTextValue()));
|
||||
} else if (propName.equals("includeInAll")) {
|
||||
builder.includeInAll(nodeBooleanValue(propNode));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -32,7 +32,7 @@ import org.elasticsearch.util.json.JsonBuilder;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonDoubleFieldMapper extends JsonNumberFieldMapper<Double> {
|
||||
|
||||
@ -57,13 +57,17 @@ public class JsonDoubleFieldMapper extends JsonNumberFieldMapper<Double> {
|
||||
}
|
||||
|
||||
@Override public JsonDoubleFieldMapper build(BuilderContext context) {
|
||||
return new JsonDoubleFieldMapper(buildNames(context),
|
||||
JsonDoubleFieldMapper fieldMapper = new JsonDoubleFieldMapper(buildNames(context),
|
||||
precisionStep, index, store, boost, omitNorms, omitTermFreqAndPositions, nullValue);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
}
|
||||
}
|
||||
|
||||
private final Double nullValue;
|
||||
|
||||
private final String nullValueAsString;
|
||||
|
||||
protected JsonDoubleFieldMapper(Names names, int precisionStep,
|
||||
Field.Index index, Field.Store store,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions,
|
||||
@ -72,6 +76,7 @@ public class JsonDoubleFieldMapper extends JsonNumberFieldMapper<Double> {
|
||||
new NamedAnalyzer("_double/" + precisionStep, new NumericDoubleAnalyzer(precisionStep)),
|
||||
new NamedAnalyzer("_double/max", new NumericDoubleAnalyzer(Integer.MAX_VALUE)));
|
||||
this.nullValue = nullValue;
|
||||
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
|
||||
}
|
||||
|
||||
@Override protected int maxPrecisionStep() {
|
||||
@ -115,12 +120,18 @@ public class JsonDoubleFieldMapper extends JsonNumberFieldMapper<Double> {
|
||||
return null;
|
||||
}
|
||||
value = nullValue;
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), nullValueAsString, boost);
|
||||
}
|
||||
} else {
|
||||
if (jsonContext.jp().getCurrentToken() == JsonToken.VALUE_STRING) {
|
||||
value = Double.parseDouble(jsonContext.jp().getText());
|
||||
} else {
|
||||
value = jsonContext.jp().getDoubleValue();
|
||||
}
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), jsonContext.jp().getText(), boost);
|
||||
}
|
||||
}
|
||||
Field field = null;
|
||||
if (stored()) {
|
||||
@ -147,5 +158,8 @@ public class JsonDoubleFieldMapper extends JsonNumberFieldMapper<Double> {
|
||||
if (nullValue != null) {
|
||||
builder.field("nullValue", nullValue);
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("includeInAll", includeInAll);
|
||||
}
|
||||
}
|
||||
}
|
@ -110,6 +110,8 @@ public abstract class JsonFieldMapper<T> implements FieldMapper<T>, JsonMapper {
|
||||
|
||||
protected NamedAnalyzer searchAnalyzer;
|
||||
|
||||
protected Boolean includeInAll;
|
||||
|
||||
protected Builder(String name) {
|
||||
super(name);
|
||||
}
|
||||
@ -162,6 +164,11 @@ public abstract class JsonFieldMapper<T> implements FieldMapper<T>, JsonMapper {
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected T includeInAll(Boolean includeInAll) {
|
||||
this.includeInAll = includeInAll;
|
||||
return builder;
|
||||
}
|
||||
|
||||
protected Names buildNames(BuilderContext context) {
|
||||
return new Names(name, buildIndexName(context), indexName == null ? name : indexName, buildFullName(context));
|
||||
}
|
||||
|
@ -57,14 +57,18 @@ public class JsonFloatFieldMapper extends JsonNumberFieldMapper<Float> {
|
||||
}
|
||||
|
||||
@Override public JsonFloatFieldMapper build(BuilderContext context) {
|
||||
return new JsonFloatFieldMapper(buildNames(context),
|
||||
JsonFloatFieldMapper fieldMapper = new JsonFloatFieldMapper(buildNames(context),
|
||||
precisionStep, index, store, boost, omitNorms, omitTermFreqAndPositions, nullValue);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private final Float nullValue;
|
||||
|
||||
private final String nullValueAsString;
|
||||
|
||||
protected JsonFloatFieldMapper(Names names, int precisionStep, Field.Index index, Field.Store store,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions,
|
||||
Float nullValue) {
|
||||
@ -72,6 +76,7 @@ public class JsonFloatFieldMapper extends JsonNumberFieldMapper<Float> {
|
||||
new NamedAnalyzer("_float/" + precisionStep, new NumericFloatAnalyzer(precisionStep)),
|
||||
new NamedAnalyzer("_float/max", new NumericFloatAnalyzer(Integer.MAX_VALUE)));
|
||||
this.nullValue = nullValue;
|
||||
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
|
||||
}
|
||||
|
||||
@Override protected int maxPrecisionStep() {
|
||||
@ -115,12 +120,18 @@ public class JsonFloatFieldMapper extends JsonNumberFieldMapper<Float> {
|
||||
return null;
|
||||
}
|
||||
value = nullValue;
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), nullValueAsString, boost);
|
||||
}
|
||||
} else {
|
||||
if (jsonContext.jp().getCurrentToken() == JsonToken.VALUE_STRING) {
|
||||
value = Float.parseFloat(jsonContext.jp().getText());
|
||||
} else {
|
||||
value = jsonContext.jp().getFloatValue();
|
||||
}
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), jsonContext.jp().getText(), boost);
|
||||
}
|
||||
}
|
||||
Field field = null;
|
||||
if (stored()) {
|
||||
@ -147,5 +158,8 @@ public class JsonFloatFieldMapper extends JsonNumberFieldMapper<Float> {
|
||||
if (nullValue != null) {
|
||||
builder.field("nullValue", nullValue);
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("includeInAll", includeInAll);
|
||||
}
|
||||
}
|
||||
}
|
@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper.json;
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.document.Fieldable;
|
||||
import org.elasticsearch.index.mapper.FieldMapperListener;
|
||||
import org.elasticsearch.index.mapper.IdFieldMapper;
|
||||
import org.elasticsearch.index.mapper.MapperParsingException;
|
||||
import org.elasticsearch.index.mapper.MergeMappingException;
|
||||
@ -72,8 +71,8 @@ public class JsonIdFieldMapper extends JsonFieldMapper<String> implements IdFiel
|
||||
Defaults.OMIT_NORMS, Defaults.OMIT_TERM_FREQ_AND_POSITIONS);
|
||||
}
|
||||
|
||||
public JsonIdFieldMapper(String name, String indexName, Field.Store store, Field.TermVector termVector,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions) {
|
||||
protected JsonIdFieldMapper(String name, String indexName, Field.Store store, Field.TermVector termVector,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions) {
|
||||
super(new Names(name, indexName, indexName, name), Defaults.INDEX, store, termVector, boost, omitNorms, omitTermFreqAndPositions,
|
||||
Lucene.KEYWORD_ANALYZER, Lucene.KEYWORD_ANALYZER);
|
||||
}
|
||||
@ -114,10 +113,6 @@ public class JsonIdFieldMapper extends JsonFieldMapper<String> implements IdFiel
|
||||
}
|
||||
}
|
||||
|
||||
@Override public void traverse(FieldMapperListener fieldMapperListener) {
|
||||
fieldMapperListener.fieldMapper(this);
|
||||
}
|
||||
|
||||
@Override protected String jsonType() {
|
||||
return JSON_TYPE;
|
||||
}
|
||||
|
@ -0,0 +1,28 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.json;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public interface JsonIncludeInAllMapper extends JsonMapper {
|
||||
|
||||
void includeInAll(Boolean includeInAll);
|
||||
}
|
@ -32,7 +32,7 @@ import org.elasticsearch.util.json.JsonBuilder;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonIntegerFieldMapper extends JsonNumberFieldMapper<Integer> {
|
||||
|
||||
@ -57,13 +57,17 @@ public class JsonIntegerFieldMapper extends JsonNumberFieldMapper<Integer> {
|
||||
}
|
||||
|
||||
@Override public JsonIntegerFieldMapper build(BuilderContext context) {
|
||||
return new JsonIntegerFieldMapper(buildNames(context),
|
||||
JsonIntegerFieldMapper fieldMapper = new JsonIntegerFieldMapper(buildNames(context),
|
||||
precisionStep, index, store, boost, omitNorms, omitTermFreqAndPositions, nullValue);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
}
|
||||
}
|
||||
|
||||
private final Integer nullValue;
|
||||
|
||||
private final String nullValueAsString;
|
||||
|
||||
protected JsonIntegerFieldMapper(Names names, int precisionStep, Field.Index index, Field.Store store,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions,
|
||||
Integer nullValue) {
|
||||
@ -71,6 +75,7 @@ public class JsonIntegerFieldMapper extends JsonNumberFieldMapper<Integer> {
|
||||
new NamedAnalyzer("_int/" + precisionStep, new NumericIntegerAnalyzer(precisionStep)),
|
||||
new NamedAnalyzer("_int/max", new NumericIntegerAnalyzer(Integer.MAX_VALUE)));
|
||||
this.nullValue = nullValue;
|
||||
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
|
||||
}
|
||||
|
||||
@Override protected int maxPrecisionStep() {
|
||||
@ -114,12 +119,18 @@ public class JsonIntegerFieldMapper extends JsonNumberFieldMapper<Integer> {
|
||||
return null;
|
||||
}
|
||||
value = nullValue;
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), nullValueAsString, boost);
|
||||
}
|
||||
} else {
|
||||
if (jsonContext.jp().getCurrentToken() == JsonToken.VALUE_STRING) {
|
||||
value = Integer.parseInt(jsonContext.jp().getText());
|
||||
} else {
|
||||
value = jsonContext.jp().getIntValue();
|
||||
}
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), jsonContext.jp().getText(), boost);
|
||||
}
|
||||
}
|
||||
Field field = null;
|
||||
if (stored()) {
|
||||
@ -146,5 +157,8 @@ public class JsonIntegerFieldMapper extends JsonNumberFieldMapper<Integer> {
|
||||
if (nullValue != null) {
|
||||
builder.field("nullValue", nullValue);
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("includeInAll", includeInAll);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -32,7 +32,7 @@ import org.elasticsearch.util.json.JsonBuilder;
|
||||
import java.io.IOException;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonLongFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
|
||||
@ -57,13 +57,17 @@ public class JsonLongFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
}
|
||||
|
||||
@Override public JsonLongFieldMapper build(BuilderContext context) {
|
||||
return new JsonLongFieldMapper(buildNames(context),
|
||||
JsonLongFieldMapper fieldMapper = new JsonLongFieldMapper(buildNames(context),
|
||||
precisionStep, index, store, boost, omitNorms, omitTermFreqAndPositions, nullValue);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
}
|
||||
}
|
||||
|
||||
private final Long nullValue;
|
||||
|
||||
private final String nullValueAsString;
|
||||
|
||||
protected JsonLongFieldMapper(Names names, int precisionStep, Field.Index index, Field.Store store,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions,
|
||||
Long nullValue) {
|
||||
@ -71,6 +75,7 @@ public class JsonLongFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
new NamedAnalyzer("_long/" + precisionStep, new NumericLongAnalyzer(precisionStep)),
|
||||
new NamedAnalyzer("_long/max", new NumericLongAnalyzer(Integer.MAX_VALUE)));
|
||||
this.nullValue = nullValue;
|
||||
this.nullValueAsString = nullValue == null ? null : nullValue.toString();
|
||||
}
|
||||
|
||||
@Override protected int maxPrecisionStep() {
|
||||
@ -114,12 +119,18 @@ public class JsonLongFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
return null;
|
||||
}
|
||||
value = nullValue;
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), nullValueAsString, boost);
|
||||
}
|
||||
} else {
|
||||
if (jsonContext.jp().getCurrentToken() == JsonToken.VALUE_STRING) {
|
||||
value = Long.parseLong(jsonContext.jp().getText());
|
||||
} else {
|
||||
value = jsonContext.jp().getLongValue();
|
||||
}
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), jsonContext.jp().getText(), boost);
|
||||
}
|
||||
}
|
||||
Field field = null;
|
||||
if (stored()) {
|
||||
@ -146,5 +157,8 @@ public class JsonLongFieldMapper extends JsonNumberFieldMapper<Long> {
|
||||
if (nullValue != null) {
|
||||
builder.field("nullValue", nullValue);
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("includeInAll", includeInAll);
|
||||
}
|
||||
}
|
||||
}
|
@ -52,6 +52,10 @@ public final class JsonMapperBuilders {
|
||||
return new JsonBoostFieldMapper.Builder(name);
|
||||
}
|
||||
|
||||
public static JsonAllFieldMapper.Builder all() {
|
||||
return new JsonAllFieldMapper.Builder();
|
||||
}
|
||||
|
||||
public static JsonMultiFieldMapper.Builder multiField(String name) {
|
||||
return new JsonMultiFieldMapper.Builder(name);
|
||||
}
|
||||
|
@ -36,7 +36,7 @@ import static org.elasticsearch.util.MapBuilder.*;
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonMultiFieldMapper implements JsonMapper {
|
||||
public class JsonMultiFieldMapper implements JsonMapper, JsonIncludeInAllMapper {
|
||||
|
||||
public static final String JSON_TYPE = "multi_field";
|
||||
|
||||
@ -113,12 +113,25 @@ public class JsonMultiFieldMapper implements JsonMapper {
|
||||
this.pathType = pathType;
|
||||
this.mappers = ImmutableMap.copyOf(mappers);
|
||||
this.defaultMapper = defaultMapper;
|
||||
|
||||
// we disable the all in mappers, only the default one can be added
|
||||
for (JsonMapper mapper : mappers.values()) {
|
||||
if (mapper instanceof JsonIncludeInAllMapper) {
|
||||
((JsonIncludeInAllMapper) mapper).includeInAll(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Override public String name() {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
@Override public void includeInAll(Boolean includeInAll) {
|
||||
if (includeInAll != null && defaultMapper != null && (defaultMapper instanceof JsonIncludeInAllMapper)) {
|
||||
((JsonIncludeInAllMapper) defaultMapper).includeInAll(includeInAll);
|
||||
}
|
||||
}
|
||||
|
||||
public JsonPath.Type pathType() {
|
||||
return pathType;
|
||||
}
|
||||
@ -176,6 +189,10 @@ public class JsonMultiFieldMapper implements JsonMapper {
|
||||
if (mergeIntoMapper == null) {
|
||||
// no mapping, simply add it if not simulating
|
||||
if (!mergeContext.mergeFlags().simulate()) {
|
||||
// disable the mapper from being in all, only the default mapper is in all
|
||||
if (mergeWithMapper instanceof JsonIncludeInAllMapper) {
|
||||
((JsonIncludeInAllMapper) mergeWithMapper).includeInAll(false);
|
||||
}
|
||||
mappers = newMapBuilder(mappers).put(mergeWithMapper.name(), mergeWithMapper).immutableMap();
|
||||
if (mergeWithMapper instanceof JsonFieldMapper) {
|
||||
mergeContext.docMapper().addFieldMapper((FieldMapper) mergeWithMapper);
|
||||
|
@ -33,9 +33,9 @@ import java.util.ArrayDeque;
|
||||
import java.util.Deque;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public abstract class JsonNumberFieldMapper<T extends Number> extends JsonFieldMapper<T> {
|
||||
public abstract class JsonNumberFieldMapper<T extends Number> extends JsonFieldMapper<T> implements JsonIncludeInAllMapper {
|
||||
|
||||
public static class Defaults extends JsonFieldMapper.Defaults {
|
||||
public static final int PRECISION_STEP = NumericUtils.PRECISION_STEP_DEFAULT;
|
||||
@ -59,14 +59,18 @@ public abstract class JsonNumberFieldMapper<T extends Number> extends JsonFieldM
|
||||
return super.store(store);
|
||||
}
|
||||
|
||||
@Override protected T boost(float boost) {
|
||||
@Override public T boost(float boost) {
|
||||
return super.boost(boost);
|
||||
}
|
||||
|
||||
@Override protected T indexName(String indexName) {
|
||||
@Override public T indexName(String indexName) {
|
||||
return super.indexName(indexName);
|
||||
}
|
||||
|
||||
@Override public T includeInAll(Boolean includeInAll) {
|
||||
return super.includeInAll(includeInAll);
|
||||
}
|
||||
|
||||
public T precisionStep(int precisionStep) {
|
||||
this.precisionStep = precisionStep;
|
||||
return builder;
|
||||
@ -81,6 +85,8 @@ public abstract class JsonNumberFieldMapper<T extends Number> extends JsonFieldM
|
||||
|
||||
protected final int precisionStep;
|
||||
|
||||
protected Boolean includeInAll;
|
||||
|
||||
protected JsonNumberFieldMapper(Names names, int precisionStep,
|
||||
Field.Index index, Field.Store store,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions,
|
||||
@ -93,6 +99,12 @@ public abstract class JsonNumberFieldMapper<T extends Number> extends JsonFieldM
|
||||
}
|
||||
}
|
||||
|
||||
@Override public void includeInAll(Boolean includeInAll) {
|
||||
if (includeInAll != null) {
|
||||
this.includeInAll = includeInAll;
|
||||
}
|
||||
}
|
||||
|
||||
protected abstract int maxPrecisionStep();
|
||||
|
||||
public int precisionStep() {
|
||||
|
@ -42,10 +42,10 @@ import static org.elasticsearch.index.mapper.json.JsonMapperBuilders.*;
|
||||
import static org.elasticsearch.util.MapBuilder.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
@ThreadSafe
|
||||
public class JsonObjectMapper implements JsonMapper {
|
||||
public class JsonObjectMapper implements JsonMapper, JsonIncludeInAllMapper {
|
||||
|
||||
public static final String JSON_TYPE = "object";
|
||||
|
||||
@ -66,6 +66,8 @@ public class JsonObjectMapper implements JsonMapper {
|
||||
|
||||
private List<FormatDateTimeFormatter> dateTimeFormatters = newArrayList();
|
||||
|
||||
private Boolean includeInAll;
|
||||
|
||||
private final List<JsonMapper.Builder> mappersBuilders = newArrayList();
|
||||
|
||||
public Builder(String name) {
|
||||
@ -93,6 +95,11 @@ public class JsonObjectMapper implements JsonMapper {
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder includeInAll(boolean includeInAll) {
|
||||
this.includeInAll = includeInAll;
|
||||
return this;
|
||||
}
|
||||
|
||||
public Builder dateTimeFormatter(Iterable<FormatDateTimeFormatter> dateTimeFormatters) {
|
||||
for (FormatDateTimeFormatter dateTimeFormatter : dateTimeFormatters) {
|
||||
this.dateTimeFormatters.add(dateTimeFormatter);
|
||||
@ -138,6 +145,8 @@ public class JsonObjectMapper implements JsonMapper {
|
||||
context.path().pathType(origPathType);
|
||||
context.path().remove();
|
||||
|
||||
objectMapper.includeInAll(includeInAll);
|
||||
|
||||
return objectMapper;
|
||||
}
|
||||
}
|
||||
@ -152,6 +161,8 @@ public class JsonObjectMapper implements JsonMapper {
|
||||
|
||||
private final FormatDateTimeFormatter[] dateTimeFormatters;
|
||||
|
||||
private Boolean includeInAll;
|
||||
|
||||
private volatile ImmutableMap<String, JsonMapper> mappers = ImmutableMap.of();
|
||||
|
||||
private final Object mutex = new Object();
|
||||
@ -185,7 +196,23 @@ public class JsonObjectMapper implements JsonMapper {
|
||||
return this.name;
|
||||
}
|
||||
|
||||
@Override public void includeInAll(Boolean includeInAll) {
|
||||
if (includeInAll == null) {
|
||||
return;
|
||||
}
|
||||
this.includeInAll = includeInAll;
|
||||
// when called from outside, apply this on all the inner mappers
|
||||
for (JsonMapper mapper : mappers.values()) {
|
||||
if (mapper instanceof JsonIncludeInAllMapper) {
|
||||
((JsonIncludeInAllMapper) mapper).includeInAll(includeInAll);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public JsonObjectMapper putMapper(JsonMapper mapper) {
|
||||
if (mapper instanceof JsonIncludeInAllMapper) {
|
||||
((JsonIncludeInAllMapper) mapper).includeInAll(includeInAll);
|
||||
}
|
||||
synchronized (mutex) {
|
||||
mappers = newMapBuilder(mappers).put(mapper.name(), mapper).immutableMap();
|
||||
}
|
||||
@ -389,6 +416,9 @@ public class JsonObjectMapper implements JsonMapper {
|
||||
builder.field("dynamic", dynamic);
|
||||
builder.field("enabled", enabled);
|
||||
builder.field("pathType", pathType.name().toLowerCase());
|
||||
if (includeInAll != null) {
|
||||
builder.field("includeInAll", includeInAll);
|
||||
}
|
||||
|
||||
if (dateTimeFormatters.length > 0) {
|
||||
builder.startArray("dateFormats");
|
||||
|
@ -23,6 +23,7 @@ import org.apache.lucene.document.Document;
|
||||
import org.codehaus.jackson.JsonParser;
|
||||
import org.elasticsearch.index.mapper.DocumentMapper;
|
||||
import org.elasticsearch.util.concurrent.NotThreadSafe;
|
||||
import org.elasticsearch.util.lucene.all.AllEntries;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
@ -54,6 +55,8 @@ public class JsonParseContext {
|
||||
|
||||
private boolean mappersAdded = false;
|
||||
|
||||
private AllEntries allEntries = new AllEntries();
|
||||
|
||||
public JsonParseContext(JsonDocumentMapper docMapper, JsonPath path) {
|
||||
this.docMapper = docMapper;
|
||||
this.path = path;
|
||||
@ -68,6 +71,7 @@ public class JsonParseContext {
|
||||
this.parsedIdState = ParsedIdState.NO;
|
||||
this.mappersAdded = false;
|
||||
this.listener = listener;
|
||||
this.allEntries = new AllEntries();
|
||||
}
|
||||
|
||||
public boolean mappersAdded() {
|
||||
@ -136,6 +140,10 @@ public class JsonParseContext {
|
||||
this.uid = uid;
|
||||
}
|
||||
|
||||
public AllEntries allEntries() {
|
||||
return this.allEntries;
|
||||
}
|
||||
|
||||
/**
|
||||
* A string builder that can be used to construct complex names for example.
|
||||
* Its better to reuse the.
|
||||
|
@ -30,7 +30,7 @@ import java.io.IOException;
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class JsonStringFieldMapper extends JsonFieldMapper<String> {
|
||||
public class JsonStringFieldMapper extends JsonFieldMapper<String> implements JsonIncludeInAllMapper {
|
||||
|
||||
public static final String JSON_TYPE = "string";
|
||||
|
||||
@ -53,15 +53,24 @@ public class JsonStringFieldMapper extends JsonFieldMapper<String> {
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override public Builder includeInAll(Boolean includeInAll) {
|
||||
this.includeInAll = includeInAll;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override public JsonStringFieldMapper build(BuilderContext context) {
|
||||
return new JsonStringFieldMapper(buildNames(context),
|
||||
JsonStringFieldMapper fieldMapper = new JsonStringFieldMapper(buildNames(context),
|
||||
index, store, termVector, boost, omitNorms, omitTermFreqAndPositions, nullValue,
|
||||
indexAnalyzer, searchAnalyzer);
|
||||
fieldMapper.includeInAll(includeInAll);
|
||||
return fieldMapper;
|
||||
}
|
||||
}
|
||||
|
||||
private final String nullValue;
|
||||
|
||||
private Boolean includeInAll;
|
||||
|
||||
protected JsonStringFieldMapper(Names names, Field.Index index, Field.Store store, Field.TermVector termVector,
|
||||
float boost, boolean omitNorms, boolean omitTermFreqAndPositions,
|
||||
String nullValue, NamedAnalyzer indexAnalyzer, NamedAnalyzer searchAnalyzer) {
|
||||
@ -69,6 +78,12 @@ public class JsonStringFieldMapper extends JsonFieldMapper<String> {
|
||||
this.nullValue = nullValue;
|
||||
}
|
||||
|
||||
@Override public void includeInAll(Boolean includeInAll) {
|
||||
if (includeInAll != null) {
|
||||
this.includeInAll = includeInAll;
|
||||
}
|
||||
}
|
||||
|
||||
@Override public String value(Fieldable field) {
|
||||
return field.stringValue();
|
||||
}
|
||||
@ -91,6 +106,9 @@ public class JsonStringFieldMapper extends JsonFieldMapper<String> {
|
||||
if (value == null) {
|
||||
return null;
|
||||
}
|
||||
if (includeInAll == null || includeInAll) {
|
||||
jsonContext.allEntries().addText(names.fullName(), value, boost);
|
||||
}
|
||||
return new Field(names.indexName(), value, store, index, termVector);
|
||||
}
|
||||
|
||||
@ -103,5 +121,8 @@ public class JsonStringFieldMapper extends JsonFieldMapper<String> {
|
||||
if (nullValue != null) {
|
||||
builder.field("nullValue", nullValue);
|
||||
}
|
||||
if (includeInAll != null) {
|
||||
builder.field("includeInAll", includeInAll);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -272,6 +272,14 @@ public abstract class JsonQueryBuilders {
|
||||
return new MoreLikeThisJsonQueryBuilder(fields);
|
||||
}
|
||||
|
||||
/**
|
||||
* A more like this query that finds documents that are "like" the provided {@link MoreLikeThisJsonQueryBuilder#likeText(String)}
|
||||
* which is checked against the "_all" field.
|
||||
*/
|
||||
public static MoreLikeThisJsonQueryBuilder moreLikeThisQuery() {
|
||||
return new MoreLikeThisJsonQueryBuilder();
|
||||
}
|
||||
|
||||
/**
|
||||
* A more like this query that runs against a specific field.
|
||||
*
|
||||
|
@ -46,6 +46,13 @@ public class MoreLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {
|
||||
private Boolean boostTerms = null;
|
||||
private float boostTermsFactor = -1;
|
||||
|
||||
/**
|
||||
* Constructs a new more like this query which uses the "_all" field.
|
||||
*/
|
||||
public MoreLikeThisJsonQueryBuilder() {
|
||||
this.fields = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the field names that will be used when generating the 'More Like This' query.
|
||||
*
|
||||
@ -159,11 +166,13 @@ public class MoreLikeThisJsonQueryBuilder extends BaseJsonQueryBuilder {
|
||||
if (fields == null || fields.length == 0) {
|
||||
throw new QueryBuilderException("moreLikeThis requires 'fields' to be provided");
|
||||
}
|
||||
builder.startArray("fields");
|
||||
for (String field : fields) {
|
||||
builder.value(field);
|
||||
if (fields != null) {
|
||||
builder.startArray("fields");
|
||||
for (String field : fields) {
|
||||
builder.value(field);
|
||||
}
|
||||
builder.endArray();
|
||||
}
|
||||
builder.endArray();
|
||||
if (likeText == null) {
|
||||
throw new QueryBuilderException("moreLikeThis requires 'likeText' to be provided");
|
||||
}
|
||||
|
@ -25,6 +25,7 @@ import org.codehaus.jackson.JsonParser;
|
||||
import org.codehaus.jackson.JsonToken;
|
||||
import org.elasticsearch.index.AbstractIndexComponent;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.mapper.AllFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.index.settings.IndexSettings;
|
||||
import org.elasticsearch.util.Booleans;
|
||||
@ -56,6 +57,7 @@ public class MoreLikeThisJsonQueryParser extends AbstractIndexComponent implemen
|
||||
JsonParser jp = parseContext.jp();
|
||||
|
||||
MoreLikeThisQuery mltQuery = new MoreLikeThisQuery();
|
||||
mltQuery.setMoreLikeFields(new String[]{AllFieldMapper.NAME});
|
||||
|
||||
JsonToken token;
|
||||
String currentFieldName = null;
|
||||
|
@ -31,6 +31,7 @@ import org.codehaus.jackson.JsonToken;
|
||||
import org.elasticsearch.index.AbstractIndexComponent;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.analysis.AnalysisService;
|
||||
import org.elasticsearch.index.mapper.AllFieldMapper;
|
||||
import org.elasticsearch.index.query.QueryParsingException;
|
||||
import org.elasticsearch.index.query.support.MapperQueryParser;
|
||||
import org.elasticsearch.index.query.support.MultiFieldMapperQueryParser;
|
||||
@ -45,7 +46,7 @@ import java.util.List;
|
||||
import static org.elasticsearch.util.lucene.search.Queries.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (Shay Banon)
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class QueryStringJsonQueryParser extends AbstractIndexComponent implements JsonQueryParser {
|
||||
|
||||
@ -68,7 +69,7 @@ public class QueryStringJsonQueryParser extends AbstractIndexComponent implement
|
||||
// move to the field value
|
||||
|
||||
String queryString = null;
|
||||
String defaultField = null;
|
||||
String defaultField = AllFieldMapper.NAME; // default to all
|
||||
MapperQueryParser.Operator defaultOperator = QueryParser.Operator.OR;
|
||||
boolean allowLeadingWildcard = true;
|
||||
boolean lowercaseExpandedTerms = true;
|
||||
|
@ -0,0 +1,159 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.lucene.all;
|
||||
|
||||
import com.google.common.collect.Lists;
|
||||
import org.elasticsearch.util.io.FastStringReader;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.Reader;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
|
||||
import static com.google.common.collect.Sets.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
/**
 * Collects the named text pieces (with per-field boosts) that make up the "_all"
 * field of a document, and exposes their concatenation as a single {@link Reader}.
 * Entries are separated by a single space character injected by {@link #read};
 * a trailing space is also emitted after the last entry.
 *
 * <p>Not thread-safe: intended to be filled via {@link #addText}, sealed with
 * {@link #finishTexts()}, then consumed once as a Reader.
 */
public class AllEntries extends Reader {

    /**
     * One named piece of text destined for the "_all" field, carrying the boost
     * of the field it came from. The text itself is exposed as a {@link Reader}.
     */
    public static class Entry {
        private final String name;
        private final Reader reader;
        private final float boost;

        public Entry(String name, Reader reader, float boost) {
            this.name = name;
            this.reader = reader;
            this.boost = boost;
        }

        /** The (full) field name this text came from. */
        public String name() {
            return this.name;
        }

        /** The boost to apply to tokens produced from this entry's text. */
        public float boost() {
            return this.boost;
        }

        /** Reader over this entry's text. */
        public Reader reader() {
            return this.reader;
        }
    }

    // All entries added so far, in insertion order.
    private final List<Entry> entries = Lists.newArrayList();

    // The entry currently being read; null once exhausted (or before finishTexts).
    private Entry current;

    // Iterator over entries, initialized by finishTexts().
    private Iterator<Entry> it;

    // When true, the next end-of-entry emits a single ' ' separator before advancing.
    private boolean itsSeparatorTime = false;

    /** Adds a named text piece with the given boost. Call before {@link #finishTexts()}. */
    public void addText(String name, String text, float boost) {
        Entry entry = new Entry(name, new FastStringReader(text), boost);
        entries.add(entry);
    }

    /** Resets this instance so it can be reused for another document. */
    public void clear() {
        this.entries.clear();
        this.current = null;
        this.it = null;
        itsSeparatorTime = false;
    }

    /**
     * Seals the entry list and positions the reader on the first entry.
     * Must be called before reading.
     *
     * @return this, for chaining
     */
    public AllEntries finishTexts() {
        it = entries.iterator();
        if (it.hasNext()) {
            current = it.next();
            itsSeparatorTime = true;
        }
        return this;
    }

    /** The live list of entries (not a copy). */
    public List<Entry> entries() {
        return this.entries;
    }

    /** The distinct set of field names contributing to this "_all" content. */
    public Set<String> fields() {
        Set<String> fields = newHashSet();
        for (Entry entry : entries) {
            fields.add(entry.name());
        }
        return fields;
    }

    /**
     * The entry currently being read. Used by the token filter to look up the
     * boost of the token just produced.
     */
    public Entry current() {
        return this.current;
    }

    @Override public int read(char[] cbuf, int off, int len) throws IOException {
        if (current == null) {
            return -1;
        }
        int result = current.reader().read(cbuf, off, len);
        if (result == -1) {
            // Current entry exhausted: first emit a single-space separator,
            // then (on the following call) advance to the next entry.
            if (itsSeparatorTime) {
                itsSeparatorTime = false;
                // NOTE(review): assumes len >= 1 here — standard Reader callers
                // always pass a positive len, but a len==0 call at this point
                // would still report 1 char written.
                cbuf[off] = ' ';
                return 1;
            }
            itsSeparatorTime = true;
            advance();
            // Recurse: either reads from the next entry or returns -1 at the end.
            return read(cbuf, off, len);
        }
        return result;
    }

    @Override public void close() {
        // Closes only the current entry's reader; also used internally by advance().
        if (current != null) {
            try {
                current.reader().close();
            } catch (IOException e) {
                // can't happen...
            } finally {
                current = null;
            }
        }
    }


    @Override public boolean ready() throws IOException {
        return (current != null) && current.reader().ready();
    }

    /**
     * Closes the current reader and opens the next one, if any.
     */
    private void advance() {
        close();
        if (it.hasNext()) {
            current = it.next();
        }
    }

    @Override public String toString() {
        // Comma-separated field names (with a trailing comma); debugging aid only.
        StringBuilder sb = new StringBuilder();
        for (Entry entry : entries) {
            sb.append(entry.name()).append(',');
        }
        return sb.toString();
    }
}
|
@ -0,0 +1,188 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.lucene.all;
|
||||
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.index.TermPositions;
|
||||
import org.apache.lucene.search.*;
|
||||
import org.apache.lucene.search.spans.SpanScorer;
|
||||
import org.apache.lucene.search.spans.SpanTermQuery;
|
||||
import org.apache.lucene.search.spans.SpanWeight;
|
||||
import org.apache.lucene.search.spans.TermSpans;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.apache.lucene.analysis.payloads.PayloadHelper.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
/**
 * A {@link SpanTermQuery} for the "_all" field that folds per-entry boosts into
 * the score. The boosts are stored as 4-byte float payloads on the tokens
 * (written by {@code AllTokenFilter} via {@code PayloadHelper.encodeFloat}) and
 * averaged over the matches in each document.
 */
public class AllTermQuery extends SpanTermQuery {

    // When true (the default), the final score is spanScore * payloadScore;
    // when false, only the averaged payload (boost) score is used.
    private boolean includeSpanScore;

    public AllTermQuery(Term term) {
        this(term, true);
    }

    public AllTermQuery(Term term, boolean includeSpanScore) {
        super(term);
        this.includeSpanScore = includeSpanScore;
    }

    @Override
    public Weight createWeight(Searcher searcher) throws IOException {
        return new AllTermWeight(this, searcher);
    }

    protected class AllTermWeight extends SpanWeight {

        public AllTermWeight(AllTermQuery query, Searcher searcher) throws IOException {
            super(query, searcher);
        }

        @Override
        public Scorer scorer(IndexReader reader, boolean scoreDocsInOrder,
                             boolean topScorer) throws IOException {
            return new AllTermSpanScorer((TermSpans) query.getSpans(reader), this, similarity, reader.norms(query.getField()));
        }

        /**
         * Span scorer that additionally reads each match's payload and
         * accumulates it, so per-field boosts recorded at index time
         * contribute to the score.
         */
        protected class AllTermSpanScorer extends SpanScorer {
            // TODO: is this the best way to allocate this?
            protected byte[] payload = new byte[4];
            protected TermPositions positions;
            // Sum of decoded payload boosts seen in the current document.
            protected float payloadScore;
            // Number of payloads seen in the current document (for averaging).
            protected int payloadsSeen;

            public AllTermSpanScorer(TermSpans spans, Weight weight, Similarity similarity, byte[] norms) throws IOException {
                super(spans, weight, similarity, norms);
                positions = spans.getPositions();
            }

            @Override
            protected boolean setFreqCurrentDoc() throws IOException {
                if (!more) {
                    return false;
                }
                // Accumulate sloppy frequency and payload boost over all
                // matches within the current document.
                doc = spans.doc();
                freq = 0.0f;
                payloadScore = 0;
                payloadsSeen = 0;
                Similarity similarity1 = getSimilarity();
                while (more && doc == spans.doc()) {
                    int matchLength = spans.end() - spans.start();

                    freq += similarity1.sloppyFreq(matchLength);
                    processPayload(similarity1);

                    more = spans.next();// this moves positions to the next match in this
                    // document
                }
                return more || (freq != 0);
            }

            protected void processPayload(Similarity similarity) throws IOException {
                if (positions.isPayloadAvailable()) {
                    // Payload is a float boost encoded by PayloadHelper.encodeFloat.
                    payload = positions.getPayload(payload, 0);
                    payloadScore += decodeFloat(payload);
                    payloadsSeen++;

                } else {
                    // zero out the payload?
                }
            }

            /**
             * @return {@link #getSpanScore()} * {@link #getPayloadScore()}
             * @throws IOException
             */
            @Override
            public float score() throws IOException {
                return includeSpanScore ? getSpanScore() * getPayloadScore() : getPayloadScore();
            }

            /**
             * Returns the SpanScorer score only.
             * <p/>
             * Should not be overridden without good cause!
             *
             * @return the score for just the Span part w/o the payload
             * @throws IOException
             * @see #score()
             */
            protected float getSpanScore() throws IOException {
                return super.score();
            }

            /**
             * The score for the payload
             */
            protected float getPayloadScore() {
                // Average boost over matches; 1 (neutral) when no payloads were seen.
                return payloadsSeen > 0 ? (payloadScore / payloadsSeen) : 1;
            }

            @Override
            protected Explanation explain(final int doc) throws IOException {
                ComplexExplanation result = new ComplexExplanation();
                Explanation nonPayloadExpl = super.explain(doc);
                result.addDetail(nonPayloadExpl);
                // QUESTION: Is there a way to avoid this skipTo call? We need to know
                // whether to load the payload or not
                Explanation payloadBoost = new Explanation();
                result.addDetail(payloadBoost);

                float payloadScore = getPayloadScore();
                payloadBoost.setValue(payloadScore);
                // GSI: I suppose we could toString the payload, but I don't think that
                // would be a good idea
                payloadBoost.setDescription("allPayload(...)");
                result.setValue(nonPayloadExpl.getValue() * payloadScore);
                result.setDescription("btq, product of:");
                result.setMatch(nonPayloadExpl.getValue() == 0 ? Boolean.FALSE : Boolean.TRUE); // LUCENE-1303
                return result;
            }

        }
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = super.hashCode();
        result = prime * result + (includeSpanScore ? 1231 : 1237);
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (!super.equals(obj))
            return false;
        if (getClass() != obj.getClass())
            return false;
        AllTermQuery other = (AllTermQuery) obj;
        if (includeSpanScore != other.includeSpanScore)
            return false;
        return true;
    }

}
|
@ -0,0 +1,67 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.lucene.all;
|
||||
|
||||
import org.apache.lucene.analysis.Analyzer;
|
||||
import org.apache.lucene.analysis.TokenFilter;
|
||||
import org.apache.lucene.analysis.TokenStream;
|
||||
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
|
||||
import org.apache.lucene.index.Payload;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.apache.lucene.analysis.payloads.PayloadHelper.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
public class AllTokenFilter extends TokenFilter {
|
||||
|
||||
public static TokenStream allTokenStream(String allFieldName, AllEntries allEntries, Analyzer analyzer) throws IOException {
|
||||
return new AllTokenFilter(analyzer.reusableTokenStream(allFieldName, allEntries), allEntries);
|
||||
}
|
||||
|
||||
private final AllEntries allEntries;
|
||||
|
||||
private final PayloadAttribute payloadAttribute;
|
||||
|
||||
AllTokenFilter(TokenStream input, AllEntries allEntries) {
|
||||
super(input);
|
||||
this.allEntries = allEntries;
|
||||
payloadAttribute = addAttribute(PayloadAttribute.class);
|
||||
}
|
||||
|
||||
public AllEntries allEntries() {
|
||||
return allEntries;
|
||||
}
|
||||
|
||||
@Override public boolean incrementToken() throws IOException {
|
||||
if (!input.incrementToken()) {
|
||||
return false;
|
||||
}
|
||||
float boost = allEntries.current().boost();
|
||||
if (boost != 1.0f) {
|
||||
payloadAttribute.setPayload(new Payload(encodeFloat(boost)));
|
||||
} else {
|
||||
payloadAttribute.setPayload(null);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
}
|
@ -0,0 +1,70 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.index.mapper.json.all;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.index.analysis.AnalysisService;
|
||||
import org.elasticsearch.index.mapper.json.JsonDocumentMapper;
|
||||
import org.elasticsearch.index.mapper.json.JsonDocumentMapperParser;
|
||||
import org.elasticsearch.util.lucene.all.AllEntries;
|
||||
import org.elasticsearch.util.lucene.all.AllTokenFilter;
|
||||
import org.hamcrest.MatcherAssert;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import static org.elasticsearch.util.io.Streams.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
@Test
|
||||
public class SimpleAllMapperTests {
|
||||
|
||||
@Test public void testSimpleAllMappers() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/all/mapping.json");
|
||||
JsonDocumentMapper docMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(mapping);
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/all/test1.json");
|
||||
Document doc = docMapper.parse(json).doc();
|
||||
Field field = doc.getField("_all");
|
||||
AllEntries allEntries = ((AllTokenFilter) field.tokenStreamValue()).allEntries();
|
||||
MatcherAssert.assertThat(allEntries.fields().size(), equalTo(2));
|
||||
MatcherAssert.assertThat(allEntries.fields().contains("name.last"), equalTo(true));
|
||||
MatcherAssert.assertThat(allEntries.fields().contains("simple1"), equalTo(true));
|
||||
}
|
||||
|
||||
@Test public void testSimpleAllMappersWithReparse() throws Exception {
|
||||
String mapping = copyToStringFromClasspath("/org/elasticsearch/index/mapper/json/all/mapping.json");
|
||||
JsonDocumentMapper docMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(mapping);
|
||||
String builtMapping = docMapper.buildSource();
|
||||
// System.out.println(builtMapping);
|
||||
// reparse it
|
||||
JsonDocumentMapper builtDocMapper = (JsonDocumentMapper) new JsonDocumentMapperParser(new AnalysisService(new Index("test"))).parse(builtMapping);
|
||||
byte[] json = copyToBytesFromClasspath("/org/elasticsearch/index/mapper/json/all/test1.json");
|
||||
Document doc = builtDocMapper.parse(json).doc();
|
||||
|
||||
Field field = doc.getField("_all");
|
||||
AllEntries allEntries = ((AllTokenFilter) field.tokenStreamValue()).allEntries();
|
||||
MatcherAssert.assertThat(allEntries.fields().size(), equalTo(2));
|
||||
MatcherAssert.assertThat(allEntries.fields().contains("name.last"), equalTo(true));
|
||||
MatcherAssert.assertThat(allEntries.fields().contains("simple1"), equalTo(true));
|
||||
}
|
||||
}
|
@ -0,0 +1,33 @@
|
||||
{
|
||||
person : {
|
||||
allField : {enabled : true},
|
||||
properties : {
|
||||
name : {
|
||||
type : "object",
|
||||
dynamic : false,
|
||||
properties : {
|
||||
first : {type : "string", store : "yes", includeInAll : false},
|
||||
last : {type : "string", index : "not_analyzed"}
|
||||
}
|
||||
},
|
||||
address : {
|
||||
type : "object",
|
||||
includeInAll : false,
|
||||
properties : {
|
||||
first : {
|
||||
properties : {
|
||||
location : {type : "string", store : "yes", indexName : "firstLocation"}
|
||||
}
|
||||
},
|
||||
last : {
|
||||
properties : {
|
||||
location : {type : "string"}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
simple1 : {type : "long", includeInAll : true},
|
||||
simple2 : {type : "long", includeInAll : false}
|
||||
}
|
||||
}
|
||||
}
|
@ -0,0 +1,20 @@
|
||||
{
|
||||
person : {
|
||||
_boost : 3.7,
|
||||
_id : "1",
|
||||
name : {
|
||||
first : "shay",
|
||||
last : "banon"
|
||||
},
|
||||
address : {
|
||||
first : {
|
||||
location : "first location"
|
||||
},
|
||||
last : {
|
||||
location : "last location"
|
||||
}
|
||||
},
|
||||
simple1 : 1,
|
||||
simple2 : 2
|
||||
}
|
||||
}
|
@ -0,0 +1,231 @@
|
||||
/*
|
||||
* Licensed to Elastic Search and Shay Banon under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. Elastic Search licenses this
|
||||
* file to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.util.lucene.all;
|
||||
|
||||
import org.apache.lucene.document.Document;
|
||||
import org.apache.lucene.document.Field;
|
||||
import org.apache.lucene.index.IndexReader;
|
||||
import org.apache.lucene.index.IndexWriter;
|
||||
import org.apache.lucene.index.Term;
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.search.TopDocs;
|
||||
import org.apache.lucene.store.Directory;
|
||||
import org.apache.lucene.store.RAMDirectory;
|
||||
import org.elasticsearch.util.lucene.Lucene;
|
||||
import org.testng.annotations.Test;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.*;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
|
||||
/**
|
||||
* @author kimchy (shay.banon)
|
||||
*/
|
||||
@Test
|
||||
public class SimpleAllTests {
|
||||
|
||||
@Test public void testSimpleAllNoBoost() throws Exception {
|
||||
Directory dir = new RAMDirectory();
|
||||
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("_id", "1", Field.Store.YES, Field.Index.NO));
|
||||
AllEntries allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "something", 1.0f);
|
||||
allEntries.addText("field2", "else", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
doc = new Document();
|
||||
doc.add(new Field("_id", "2", Field.Store.YES, Field.Index.NO));
|
||||
allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "else", 1.0f);
|
||||
allEntries.addText("field2", "something", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
IndexReader reader = indexWriter.getReader();
|
||||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
|
||||
TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
searcher.close();
|
||||
|
||||
indexWriter.close();
|
||||
}
|
||||
|
||||
@Test public void testSimpleAllWithBoost() throws Exception {
|
||||
Directory dir = new RAMDirectory();
|
||||
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("_id", "1", Field.Store.YES, Field.Index.NO));
|
||||
AllEntries allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "something", 1.0f);
|
||||
allEntries.addText("field2", "else", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
doc = new Document();
|
||||
doc.add(new Field("_id", "2", Field.Store.YES, Field.Index.NO));
|
||||
allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "else", 2.0f);
|
||||
allEntries.addText("field2", "something", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
IndexReader reader = indexWriter.getReader();
|
||||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
|
||||
// this one is boosted. so the second doc is more relevant
|
||||
TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(1));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(0));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
searcher.close();
|
||||
|
||||
indexWriter.close();
|
||||
}
|
||||
|
||||
@Test public void testMultipleTokensAllNoBoost() throws Exception {
|
||||
Directory dir = new RAMDirectory();
|
||||
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("_id", "1", Field.Store.YES, Field.Index.NO));
|
||||
AllEntries allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "something moo", 1.0f);
|
||||
allEntries.addText("field2", "else koo", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
doc = new Document();
|
||||
doc.add(new Field("_id", "2", Field.Store.YES, Field.Index.NO));
|
||||
allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "else koo", 1.0f);
|
||||
allEntries.addText("field2", "something moo", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
IndexReader reader = indexWriter.getReader();
|
||||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
|
||||
TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
searcher.close();
|
||||
|
||||
indexWriter.close();
|
||||
}
|
||||
|
||||
@Test public void testMultipleTokensAllWithBoost() throws Exception {
|
||||
Directory dir = new RAMDirectory();
|
||||
IndexWriter indexWriter = new IndexWriter(dir, Lucene.STANDARD_ANALYZER, true, IndexWriter.MaxFieldLength.UNLIMITED);
|
||||
|
||||
Document doc = new Document();
|
||||
doc.add(new Field("_id", "1", Field.Store.YES, Field.Index.NO));
|
||||
AllEntries allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "something moo", 1.0f);
|
||||
allEntries.addText("field2", "else koo", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
doc = new Document();
|
||||
doc.add(new Field("_id", "2", Field.Store.YES, Field.Index.NO));
|
||||
allEntries = new AllEntries();
|
||||
allEntries.addText("field1", "else koo", 2.0f);
|
||||
allEntries.addText("field2", "something moo", 1.0f);
|
||||
allEntries.finishTexts();
|
||||
doc.add(new Field("_all", AllTokenFilter.allTokenStream("_all", allEntries, Lucene.STANDARD_ANALYZER)));
|
||||
|
||||
indexWriter.addDocument(doc);
|
||||
|
||||
IndexReader reader = indexWriter.getReader();
|
||||
IndexSearcher searcher = new IndexSearcher(reader);
|
||||
|
||||
TopDocs docs = searcher.search(new AllTermQuery(new Term("_all", "else")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(1));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(0));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "koo")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(1));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(0));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "something")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
docs = searcher.search(new AllTermQuery(new Term("_all", "moo")), 10);
|
||||
assertThat(docs.totalHits, equalTo(2));
|
||||
assertThat(docs.scoreDocs[0].doc, equalTo(0));
|
||||
assertThat(docs.scoreDocs[1].doc, equalTo(1));
|
||||
|
||||
searcher.close();
|
||||
|
||||
indexWriter.close();
|
||||
}
|
||||
}
|
@ -111,6 +111,17 @@ public class TermsActionTests extends AbstractServersTests {
|
||||
assertThat(termsResponse.field("value").docFreq("aaa"), equalTo(1));
|
||||
assertThat(termsResponse.field("value").docFreq("bbb"), equalTo(1));
|
||||
|
||||
logger.info("Verify freqs (no fields, on _all)");
|
||||
termsResponse = client.terms(termsRequest("test")).actionGet();
|
||||
assertThat(termsResponse.numDocs(), equalTo(2l));
|
||||
assertThat(termsResponse.maxDoc(), equalTo(2l));
|
||||
assertThat(termsResponse.deletedDocs(), equalTo(0l));
|
||||
assertThat(termsResponse.successfulShards(), equalTo(indexStatus.shards().size()));
|
||||
assertThat(termsResponse.failedShards(), equalTo(0));
|
||||
assertThat(termsResponse.fieldsAsMap().isEmpty(), equalTo(false));
|
||||
assertThat(termsResponse.field("_all").docFreq("aaa"), equalTo(1));
|
||||
assertThat(termsResponse.field("_all").docFreq("bbb"), equalTo(1));
|
||||
|
||||
logger.info("Delete 3");
|
||||
client.index(indexRequest("test").type("type1").id("3").source(binaryJsonBuilder().startObject().field("value", "bbb").endObject())).actionGet();
|
||||
logger.info("Refresh");
|
||||
|
Loading…
x
Reference in New Issue
Block a user