Mappings: Remove _boost field
The `_boost` field has been deprecated since 1.0.0.RC1. It is finally removed here. Closes #8875
parent cc8f159a3c
commit 7181bbde26
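For context, a minimal sketch of the kind of mapping this commit stops accepting: a type-level `_boost` meta field with a `name` and `null_value`, as seen in the test fixtures below. This assumes the official Python client (the same `client.indices.create(index='test', body=...)` call appears in the index-generation script in the first hunk); the index, type, and field names here are made up for illustration.

    # Hypothetical sketch, not part of this commit: a pre-2.0 mapping using _boost.
    from elasticsearch import Elasticsearch

    client = Elasticsearch()
    client.indices.create(index='test', body={
        'mappings': {
            'type1': {
                '_boost': {
                    'name': 'my_boost',   # document field that carried the per-document boost
                    'null_value': 2.0     # boost used when that field is null (see parseFloatValue in the deleted mapper below)
                },
                'properties': {
                    'field': {'type': 'string'}
                }
            }
        }
    })

After this commit such a mapping is rejected, since `_boost` is removed from the known root mappers and from the reserved/meta field sets.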
@@ -181,7 +181,10 @@ def generate_index(client, version):
        },
        '_routing': {
            'path': 'myrouting'
-       }
+       },
+       '_boost': {
+           'null_value': 2.0
+       }
    }
    mappings['custom_formats'] = {
        'properties': {
@@ -196,7 +199,6 @@ def generate_index(client, version):
        }
    }

    client.indices.create(index='test', body={
        'settings': {
            'number_of_shards': 1,
@@ -247,7 +247,7 @@ curl -XGET 'localhost:9200/index/type/_search'
}
---------------

-==== Meta fields have limited confiugration
+==== Meta fields have limited configuration
Meta fields (those beginning with underscore) are fields used by elasticsearch
to provide special features. They now have limited configuration options.

@@ -255,6 +255,7 @@ to provide special features. They now have limited configuration options.
* `_type` configuration can no longer be changed.
* `_index` configuration is limited to enabling the field.
* `_routing` configuration is limited to requiring the field.
+* `_boost` has been removed.

=== Codecs

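A companion sketch of what the limited meta-field configuration listed above looks like in practice, using the same hypothetical Python client as the earlier example: `_routing` can still be marked as required, while a `_boost` entry is simply no longer recognized. Index, type, and field names are again made up.

    # Hypothetical sketch of post-change mapping options; only limited meta-field config remains.
    from elasticsearch import Elasticsearch

    client = Elasticsearch()
    client.indices.create(index='test2', body={
        'mappings': {
            'type1': {
                '_routing': {'required': True},  # still configurable: require routing on index/get
                # '_boost': {...}                # no longer accepted: the meta field is removed
                'properties': {
                    'name': {'type': 'string'}
                }
            }
        }
    })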
@@ -53,7 +53,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> implements IndicesRequest.Replaceable {

    private static ObjectOpenHashSet<String> RESERVED_FIELDS = ObjectOpenHashSet.from(
-           "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_boost", "_parent", "_routing", "_index",
+           "_uid", "_id", "_type", "_source", "_all", "_analyzer", "_parent", "_routing", "_index",
            "_size", "_timestamp", "_ttl"
    );

@@ -22,9 +22,7 @@ package org.elasticsearch.index.mapper;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReaderContext;
@@ -33,7 +31,6 @@ import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.CloseableThreadLocal;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Preconditions;
import org.elasticsearch.common.bytes.BytesReference;
@@ -50,10 +47,8 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
-import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
@@ -194,7 +189,6 @@ public class DocumentMapper implements ToXContent {
        this.rootMappers.put(SourceFieldMapper.class, new SourceFieldMapper(indexSettings));
        this.rootMappers.put(TypeFieldMapper.class, new TypeFieldMapper(indexSettings));
        this.rootMappers.put(AllFieldMapper.class, new AllFieldMapper(indexSettings));
-       this.rootMappers.put(BoostFieldMapper.class, new BoostFieldMapper(indexSettings));
        this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper(indexSettings));
        this.rootMappers.put(TTLFieldMapper.class, new TTLFieldMapper(indexSettings));
        this.rootMappers.put(VersionFieldMapper.class, new VersionFieldMapper(indexSettings));
@@ -411,10 +405,6 @@ public class DocumentMapper implements ToXContent {
        return rootMapper(SizeFieldMapper.class);
    }
-
-   public BoostFieldMapper boostFieldMapper() {
-       return rootMapper(BoostFieldMapper.class);
-   }

    public Filter typeFilter() {
        return this.typeFilter;
    }

@@ -54,7 +54,6 @@ import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.geo.GeoPointFieldMapper;
import org.elasticsearch.index.mapper.geo.GeoShapeFieldMapper;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
-import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
@@ -139,7 +138,6 @@ public class DocumentMapperParser extends AbstractIndexComponent {
                .put(SourceFieldMapper.NAME, new SourceFieldMapper.TypeParser())
                .put(TypeFieldMapper.NAME, new TypeFieldMapper.TypeParser())
                .put(AllFieldMapper.NAME, new AllFieldMapper.TypeParser())
-               .put(BoostFieldMapper.NAME, new BoostFieldMapper.TypeParser())
                .put(ParentFieldMapper.NAME, new ParentFieldMapper.TypeParser())
                .put(RoutingFieldMapper.NAME, new RoutingFieldMapper.TypeParser())
                .put(TimestampFieldMapper.NAME, new TimestampFieldMapper.TypeParser())
@@ -89,10 +89,6 @@ public final class MapperBuilders {
        return new ParentFieldMapper.Builder();
    }
-
-   public static BoostFieldMapper.Builder boost(String name) {
-       return new BoostFieldMapper.Builder(name);
-   }

    public static AllFieldMapper.Builder all() {
        return new AllFieldMapper.Builder();
    }

@@ -90,7 +90,7 @@ public class MapperService extends AbstractIndexComponent {

    public static final String DEFAULT_MAPPING = "_default_";
    private static ObjectOpenHashSet<String> META_FIELDS = ObjectOpenHashSet.from(
-           "_uid", "_id", "_type", "_all", "_analyzer", "_boost", "_parent", "_routing", "_index",
+           "_uid", "_id", "_type", "_all", "_analyzer", "_parent", "_routing", "_index",
            "_size", "_timestamp", "_ttl"
    );
    private final AnalysisService analysisService;

@@ -1,336 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.internal;

import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.NumericRangeFilter;
import org.apache.lucene.search.NumericRangeQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Numbers;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.analysis.NumericFloatAnalyzer;
import org.elasticsearch.index.fielddata.FieldDataType;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.mapper.InternalMapper;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperBuilders;
import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.MergeContext;
import org.elasticsearch.index.mapper.MergeMappingException;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.RootMapper;
import org.elasticsearch.index.mapper.core.FloatFieldMapper;
import org.elasticsearch.index.mapper.core.NumberFieldMapper;
import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.search.NumericRangeFieldDataFilter;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeFloatValue;
import static org.elasticsearch.index.mapper.core.TypeParsers.parseNumberField;

/**
 *
 */
public class BoostFieldMapper extends NumberFieldMapper<Float> implements InternalMapper, RootMapper {

    public static final String CONTENT_TYPE = "_boost";
    public static final String NAME = "_boost";

    public static class Defaults extends NumberFieldMapper.Defaults {
        public static final String NAME = "_boost";
        public static final Float NULL_VALUE = null;

        public static final FieldType FIELD_TYPE = new FieldType(NumberFieldMapper.Defaults.FIELD_TYPE);

        static {
            FIELD_TYPE.setStored(false);
            FIELD_TYPE.setIndexOptions(IndexOptions.NONE); // not indexed
        }
    }

    public static class Builder extends NumberFieldMapper.Builder<Builder, BoostFieldMapper> {

        protected Float nullValue = Defaults.NULL_VALUE;

        public Builder(String name) {
            super(name, new FieldType(Defaults.FIELD_TYPE), Defaults.PRECISION_STEP_32_BIT);
            builder = this;
        }

        public Builder nullValue(float nullValue) {
            this.nullValue = nullValue;
            return this;
        }

        // if we are indexed we use DOCS_ONLY
        @Override
        protected IndexOptions getDefaultIndexOption() {
            return IndexOptions.DOCS;
        }

        @Override
        public BoostFieldMapper build(BuilderContext context) {
            return new BoostFieldMapper(name, buildIndexName(context),
                    fieldType.numericPrecisionStep(), boost, fieldType, docValues, nullValue, fieldDataSettings, context.indexSettings());
        }
    }

    public static class TypeParser implements Mapper.TypeParser {
        @Override
        public Mapper.Builder parse(String fieldName, Map<String, Object> node, ParserContext parserContext) throws MapperParsingException {
            String name = node.get("name") == null ? BoostFieldMapper.Defaults.NAME : node.remove("name").toString();
            BoostFieldMapper.Builder builder = MapperBuilders.boost(name);
            parseNumberField(builder, name, node, parserContext);
            for (Iterator<Map.Entry<String, Object>> iterator = node.entrySet().iterator(); iterator.hasNext();) {
                Map.Entry<String, Object> entry = iterator.next();
                String propName = Strings.toUnderscoreCase(entry.getKey());
                Object propNode = entry.getValue();
                if (propName.equals("null_value")) {
                    builder.nullValue(nodeFloatValue(propNode));
                    iterator.remove();
                }
            }
            return builder;
        }
    }

    private final Float nullValue;

    public BoostFieldMapper(Settings indexSettings) {
        this(Defaults.NAME, Defaults.NAME, indexSettings);
    }

    protected BoostFieldMapper(String name, String indexName, Settings indexSettings) {
        this(name, indexName, Defaults.PRECISION_STEP_32_BIT, Defaults.BOOST, new FieldType(Defaults.FIELD_TYPE), null,
                Defaults.NULL_VALUE, null, indexSettings);
    }

    protected BoostFieldMapper(String name, String indexName, int precisionStep, float boost, FieldType fieldType, Boolean docValues, Float nullValue,
                               @Nullable Settings fieldDataSettings, Settings indexSettings) {
        super(new Names(name, indexName, indexName, name), precisionStep, boost, fieldType, docValues, Defaults.IGNORE_MALFORMED, Defaults.COERCE,
                NumericFloatAnalyzer.buildNamedAnalyzer(precisionStep), NumericFloatAnalyzer.buildNamedAnalyzer(Integer.MAX_VALUE),
                null, null, fieldDataSettings, indexSettings, MultiFields.empty(), null);
        this.nullValue = nullValue;
    }

    @Override
    public FieldType defaultFieldType() {
        return Defaults.FIELD_TYPE;
    }

    @Override
    public FieldDataType defaultFieldDataType() {
        return new FieldDataType("float");
    }

    @Override
    public boolean hasDocValues() {
        return false;
    }

    @Override
    protected int maxPrecisionStep() {
        return 32;
    }

    @Override
    public Float value(Object value) {
        if (value == null) {
            return null;
        }
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        if (value instanceof BytesRef) {
            return Numbers.bytesToFloat((BytesRef) value);
        }
        return Float.parseFloat(value.toString());
    }

    @Override
    public BytesRef indexedValueForSearch(Object value) {
        int intValue = NumericUtils.floatToSortableInt(parseValue(value));
        BytesRefBuilder bytesRef = new BytesRefBuilder();
        NumericUtils.intToPrefixCoded(intValue, precisionStep(), bytesRef);
        return bytesRef.get();
    }

    private float parseValue(Object value) {
        if (value instanceof Number) {
            return ((Number) value).floatValue();
        }
        if (value instanceof BytesRef) {
            return Float.parseFloat(((BytesRef) value).utf8ToString());
        }
        return Float.parseFloat(value.toString());
    }

    @Override
    public Query fuzzyQuery(String value, Fuzziness fuzziness, int prefixLength, int maxExpansions, boolean transpositions) {
        float iValue = Float.parseFloat(value);
        float iSim = fuzziness.asFloat();
        return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
                iValue - iSim,
                iValue + iSim,
                true, true);
    }

    @Override
    public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        return NumericRangeQuery.newFloatRange(names.indexName(), precisionStep,
                lowerTerm == null ? null : parseValue(lowerTerm),
                upperTerm == null ? null : parseValue(upperTerm),
                includeLower, includeUpper);
    }

    @Override
    public Filter rangeFilter(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep,
                lowerTerm == null ? null : parseValue(lowerTerm),
                upperTerm == null ? null : parseValue(upperTerm),
                includeLower, includeUpper);
    }

    @Override
    public Filter rangeFilter(QueryParseContext parseContext, Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, @Nullable QueryParseContext context) {
        return NumericRangeFieldDataFilter.newFloatRange((IndexNumericFieldData) parseContext.getForField(this),
                lowerTerm == null ? null : parseValue(lowerTerm),
                upperTerm == null ? null : parseValue(upperTerm),
                includeLower, includeUpper);
    }

    @Override
    public Filter nullValueFilter() {
        if (nullValue == null) {
            return null;
        }
        return NumericRangeFilter.newFloatRange(names.indexName(), precisionStep,
                nullValue,
                nullValue,
                true, true);
    }

    @Override
    public void preParse(ParseContext context) throws IOException {
    }

    @Override
    public void postParse(ParseContext context) throws IOException {
    }

    @Override
    public boolean includeInObject() {
        return true;
    }

    @Override
    public void parse(ParseContext context) throws IOException {
        // we override parse since we want to handle cases where it is not indexed and not stored (the default)
        float value = parseFloatValue(context);
        if (!Float.isNaN(value)) {
            context.docBoost(value);
        }
        super.parse(context);
    }

    @Override
    protected void innerParseCreateField(ParseContext context, List<Field> fields) throws IOException {
        final float value = parseFloatValue(context);
        if (Float.isNaN(value)) {
            return;
        }
        context.docBoost(value);
        fields.add(new FloatFieldMapper.CustomFloatNumericField(this, value, fieldType));
    }

    private float parseFloatValue(ParseContext context) throws IOException {
        float value;
        if (context.parser().currentToken() == XContentParser.Token.VALUE_NULL) {
            if (nullValue == null) {
                return Float.NaN;
            }
            value = nullValue;
        } else {
            value = context.parser().floatValue(coerce.value());
        }
        return value;
    }

    @Override
    protected String contentType() {
        return CONTENT_TYPE;
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        boolean includeDefaults = params.paramAsBoolean("include_defaults", false);
        boolean indexed = fieldType.indexOptions() != IndexOptions.NONE;
        boolean indexedDefault = Defaults.FIELD_TYPE.indexOptions() != IndexOptions.NONE;

        // all are defaults, don't write it at all
        if (!includeDefaults && name().equals(Defaults.NAME) && nullValue == null &&
                indexed == indexedDefault &&
                fieldType.stored() == Defaults.FIELD_TYPE.stored() &&
                customFieldDataSettings == null) {
            return builder;
        }
        builder.startObject(contentType());
        if (includeDefaults || !name().equals(Defaults.NAME)) {
            builder.field("name", name());
        }
        if (includeDefaults || nullValue != null) {
            builder.field("null_value", nullValue);
        }
        if (includeDefaults || indexed != indexedDefault) {
            builder.field("index", indexTokenizeOptionToString(indexed, fieldType.tokenized()));
        }
        if (includeDefaults || fieldType.stored() != Defaults.FIELD_TYPE.stored()) {
            builder.field("store", fieldType.stored());
        }
        if (customFieldDataSettings != null) {
            builder.field("fielddata", (Map) customFieldDataSettings.getAsMap());
        } else if (includeDefaults) {
            builder.field("fielddata", (Map) fieldDataType.getSettings().getAsMap());

        }
        builder.endObject();
        return builder;
    }

    @Override
    public void merge(Mapper mergeWith, MergeContext mergeContext) throws MergeMappingException {
        // do nothing here, no merging, but also no exception
    }
}
@@ -1008,7 +1008,7 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
    @Test
    public void testUngeneratedFieldsPartOfSourceUnstoredSourceDisabled() throws IOException {
        indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(false, false);
-       String[] fieldsList = {"my_boost"};
+       String[] fieldsList = {};
        // before refresh - document is only in translog
        assertGetFieldsAlwaysNull(indexOrAlias(), "doc", "1", fieldsList);
        refresh();
@@ -1027,7 +1027,7 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
            sourceEnabled = randomBoolean();
        }
        indexSingleDocumentWithUngeneratedFieldsThatArePartOf_source(stored, sourceEnabled);
-       String[] fieldsList = {"my_boost"};
+       String[] fieldsList = {};
        // before refresh - document is only in translog
        assertGetFieldsAlwaysWorks(indexOrAlias(), "doc", "1", fieldsList);
        refresh();
@@ -1049,11 +1049,6 @@ public class GetActionTests extends ElasticsearchIntegrationTest {
                " \"doc\": {\n" +
                " \"_source\": {\n" +
                " \"enabled\": " + sourceEnabled + "\n" +
                " },\n" +
-               " \"_boost\": {\n" +
-               " \"name\": \"my_boost\",\n" +
-               " \"null_value\": 1,\n" +
-               " \"store\": \"" + storedString + "\"\n" +
-               " }\n" +
                " }\n" +
                " }\n" +
@@ -1,5 +1,4 @@
{
-   "_boost":3.7,
    "_id":"1",
    "name":{
        "first":"shay",
@@ -1,52 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.boost;

import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import org.junit.Test;

import java.util.LinkedHashMap;
import java.util.Map;

import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;


public class BoostMappingIntegrationTests extends ElasticsearchIntegrationTest {

    @Test
    public void testSetValues() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_boost")
                .field("store", "yes").field("index", "not_analyzed")
                .endObject()
                .endObject().endObject().string();
        assertAcked(prepareCreate("test").addMapping("type", mapping));
        ensureYellow();
        GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings().addIndices("test").addTypes("type").setFields("_boost").get();
        assertTrue(response.mappings().containsKey("test"));
        assertNotNull(response.fieldMappings("test", "type", "_boost"));
        Map<String, Object> boostSource = response.fieldMappings("test", "type", "_boost").sourceAsMap();
        assertThat((Boolean)((LinkedHashMap)(boostSource.get("_boost"))).get("store"), equalTo(true));
        assertThat((String)((LinkedHashMap)(boostSource.get("_boost"))).get("index"), equalTo("not_analyzed"));
    }
}
@@ -1,101 +0,0 @@
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.elasticsearch.index.mapper.boost;

import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.internal.BoostFieldMapper;
import org.elasticsearch.index.IndexService;
import org.elasticsearch.test.ElasticsearchSingleNodeTest;
import org.junit.Test;

import static org.hamcrest.Matchers.equalTo;

/**
 */
public class BoostMappingTests extends ElasticsearchSingleNodeTest {

    @Test
    public void testDefaultMapping() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().endObject().string();

        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = mapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
                .field("_boost", 2.0f)
                .field("field", "a")
                .field("field", "b")
                .endObject().bytes());

        // one fo the same named field will have the proper boost, the others will have 1
        IndexableField[] fields = doc.rootDoc().getFields("field");
        assertThat(fields[0].boost(), equalTo(2.0f));
        assertThat(fields[1].boost(), equalTo(1.0f));
    }

    @Test
    public void testCustomName() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_boost").field("name", "custom_boost").endObject()
                .endObject().endObject().string();

        DocumentMapper mapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);

        ParsedDocument doc = mapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
                .field("field", "a")
                .field("_boost", 2.0f)
                .endObject().bytes());
        assertThat(doc.rootDoc().getField("field").boost(), equalTo(1.0f));

        doc = mapper.parse("type", "1", XContentFactory.jsonBuilder().startObject()
                .field("field", "a")
                .field("custom_boost", 2.0f)
                .endObject().bytes());
        assertThat(doc.rootDoc().getField("field").boost(), equalTo(2.0f));
    }

    @Test
    public void testDefaultValues() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").endObject().string();
        DocumentMapper docMapper = createIndex("test").mapperService().documentMapperParser().parse(mapping);
        assertThat(docMapper.boostFieldMapper().fieldType().stored(), equalTo(BoostFieldMapper.Defaults.FIELD_TYPE.stored()));
        assertThat(docMapper.boostFieldMapper().fieldType().indexOptions(), equalTo(BoostFieldMapper.Defaults.FIELD_TYPE.indexOptions()));
    }

    @Test
    public void testSetValues() throws Exception {
        String mapping = XContentFactory.jsonBuilder().startObject().startObject("type")
                .startObject("_boost")
                .field("store", "yes").field("index", "not_analyzed")
                .endObject()
                .endObject().endObject().string();
        IndexService indexServices = createIndex("test");
        DocumentMapper docMapper = indexServices.mapperService().documentMapperParser().parse("type", mapping);
        assertThat(docMapper.boostFieldMapper().fieldType().stored(), equalTo(true));
        assertEquals(IndexOptions.DOCS, docMapper.boostFieldMapper().fieldType().indexOptions());
        docMapper.refreshSource();
        docMapper = indexServices.mapperService().documentMapperParser().parse("type", docMapper.mappingSource().string());
        assertThat(docMapper.boostFieldMapper().fieldType().stored(), equalTo(true));
        assertEquals(IndexOptions.DOCS, docMapper.boostFieldMapper().fieldType().indexOptions());
    }
}
@@ -8,9 +8,6 @@
    enabled:true,
    _source:{
    },
-   _boost:{
-       null_value:2.0
-   },
    properties:{
        name:{
            type:"object",
@@ -1,5 +1,4 @@
{
-   _boost:3.7,
    name:{
        first:"shay",
        last:"banon"
@@ -1,5 +1,4 @@
{
-   _boost:3.7,
    _id:"1",
    name:{
        first:"shay",
@@ -1,6 +1,5 @@
{
    person:{
-       _boost:3.7,
        _id:"1",
        name:{
            first:"shay",
@@ -1,5 +1,4 @@
{
-   _boost:3.7,
    _id:"1",
    name:{
        first:"shay",
@@ -123,7 +123,6 @@ public class TransportSearchFailuresTests extends ElasticsearchIntegrationTest {
                .field("name", nameValue + id)
                .field("age", age)
                .field("multi", multi.toString())
-               .field("_boost", age * 10)
                .endObject();
    }
}

@@ -60,9 +60,6 @@ import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.hamcrest.Matchers.*;

/**
 *
 */
public class TransportTwoNodesSearchTests extends ElasticsearchIntegrationTest {

    @Override
@@ -113,13 +110,27 @@ public class TransportTwoNodesSearchTests extends ElasticsearchIntegrationTest {
                .field("name", nameValue + id)
                .field("age", age)
                .field("multi", multi.toString())
-               .field("_boost", age * 10)
                .endObject();
    }

    @Test
    public void testDfsQueryThenFetch() throws Exception {
        prepareData();
        ImmutableSettings.Builder settingsBuilder = settingsBuilder()
                .put(indexSettings())
                .put("routing.hash.type", "simple");
        client().admin().indices().create(createIndexRequest("test")
                .settings(settingsBuilder))
                .actionGet();
        ensureGreen();

        // we need to have age (ie number of repeats of "test" term) high enough
        // to produce the same 8-bit norm for all docs here, so that
        // the tf is basically the entire score (assuming idf is fixed, which
        // it should be if dfs is working correctly)
        for (int i = 1024; i < 1124; i++) {
            index(Integer.toString(i - 1024), "test", i);
        }
        refresh();

        int total = 0;
        SearchResponse searchResponse = client().prepareSearch("test").setSearchType(DFS_QUERY_THEN_FETCH).setQuery(termQuery("multi", "test")).setSize(60).setExplain(true).setScroll(TimeValue.timeValueSeconds(30)).get();
@@ -133,7 +144,7 @@ public class TransportTwoNodesSearchTests extends ElasticsearchIntegrationTest {
        for (int i = 0; i < hits.length; ++i) {
            SearchHit hit = hits[i];
            assertThat(hit.explanation(), notNullValue());
-           assertThat("id[" + hit.id() + "]", hit.id(), equalTo(Integer.toString(100 - total - i - 1)));
+           assertThat("id[" + hit.id() + "] -> " + hit.explanation().toString(), hit.id(), equalTo(Integer.toString(100 - total - i - 1)));
        }
        total += hits.length;
        searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueSeconds(30)).get();
@@ -155,7 +155,7 @@ public class SimpleSortTests extends ElasticsearchIntegrationTest {

    public void testIssue6639() throws ExecutionException, InterruptedException {
        assertAcked(prepareCreate("$index")
-               .addMapping("$type","{\"$type\": {\"_boost\": {\"name\": \"boost\", \"null_value\": 1.0}, \"properties\": {\"grantee\": {\"index\": \"not_analyzed\", \"term_vector\": \"with_positions_offsets\", \"type\": \"string\", \"analyzer\": \"snowball\", \"boost\": 1.0, \"store\": \"yes\"}}}}"));
+               .addMapping("$type","{\"$type\": {\"properties\": {\"grantee\": {\"index\": \"not_analyzed\", \"term_vector\": \"with_positions_offsets\", \"type\": \"string\", \"analyzer\": \"snowball\", \"boost\": 1.0, \"store\": \"yes\"}}}}"));
        indexRandom(true,
                client().prepareIndex("$index", "$type", "data.activity.5").setSource("{\"django_ct\": \"data.activity\", \"grantee\": \"Grantee 1\"}"),
                client().prepareIndex("$index", "$type", "data.activity.6").setSource("{\"django_ct\": \"data.activity\", \"grantee\": \"Grantee 2\"}"));
Binary file not shown.
Binary file not shown.