Merge pull request #16900 from jimferenczi/mapping_field_level_boost

Change the field mapping index time boost into a query time boost
Jim Ferenczi 2016-03-04 11:50:29 +01:00
commit cd950a34da
28 changed files with 286 additions and 92 deletions
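In short: a field's mapping-level `boost` is no longer written into the index via `Field#setBoost`. For indices created on or after 5.0.0 it is applied at query time by wrapping the field's term query in a Lucene `BoostQuery`, while indices created before 5.0.0 keep the old index-time behaviour. A minimal standalone sketch of that decision follows (the class name, method signature and the `indexCreatedBefore5x` flag are invented for illustration; the patch itself threads the check through `MappedFieldType#termQuery` and `Version.indexCreated(...)`):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;

// Illustrative sketch only -- not part of the patch.
class QueryTimeBoostSketch {

    // mappingBoost is the `boost` value from the field mapping; indexCreatedBefore5x
    // stands in for Version.indexCreated(indexSettings).before(Version.V_5_0_0).
    static Query termQuery(String field, String value, float mappingBoost, boolean indexCreatedBefore5x) {
        Query query = new TermQuery(new Term(field, value));
        if (mappingBoost == 1f || indexCreatedBefore5x) {
            // no boost configured, or a pre-5.0 index where the boost was already
            // baked into the index at document-parse time
            return query;
        }
        // 5.0+ indices: apply the mapping boost at query time instead
        return new BoostQuery(query, mappingBoost);
    }

    public static void main(String[] args) {
        System.out.println(termQuery("title", "fox", 2.0f, false)); // boosted, e.g. (title:fox)^2.0
        System.out.println(termQuery("title", "fox", 2.0f, true));  // legacy index: plain title:fox
    }
}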


@@ -98,7 +98,6 @@ class DocumentParser implements Closeable {
}
reverseOrder(context);
-applyDocBoost(context);
ParsedDocument doc = parsedDocument(source, context, update(context, mapping));
// reset the context to free up memory
@@ -186,24 +185,6 @@ class DocumentParser implements Closeable {
}
}
-private static void applyDocBoost(ParseContext.InternalParseContext context) {
-// apply doc boost
-if (context.docBoost() != 1.0f) {
-Set<String> encounteredFields = new HashSet<>();
-for (ParseContext.Document doc : context.docs()) {
-encounteredFields.clear();
-for (IndexableField field : doc) {
-if (field.fieldType().indexOptions() != IndexOptions.NONE && !field.fieldType().omitNorms()) {
-if (!encounteredFields.contains(field.name())) {
-((Field) field).setBoost(context.docBoost() * field.boost());
-encounteredFields.add(field.name());
-}
-}
-}
-}
-}
-}
private static ParsedDocument parsedDocument(SourceToParse source, ParseContext.InternalParseContext context, Mapping update) {
return new ParsedDocument(
context.uid(),


@@ -300,7 +300,8 @@ public abstract class FieldMapper extends Mapper implements Cloneable {
for (Field field : fields) {
if (!customBoost()
// don't set boosts eg. on dv fields
-&& field.fieldType().indexOptions() != IndexOptions.NONE) {
+&& field.fieldType().indexOptions() != IndexOptions.NONE
+&& Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
field.setBoost(fieldType().boost());
}
context.doc().add(field);


@@ -32,7 +32,9 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TermRangeQuery;
+import org.apache.lucene.search.BoostQuery;
import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.Version;
import org.elasticsearch.action.fieldstats.FieldStats;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
@@ -398,7 +400,12 @@ public abstract class MappedFieldType extends FieldType {
}
public Query termQuery(Object value, @Nullable QueryShardContext context) {
-return new TermQuery(createTerm(value));
+TermQuery query = new TermQuery(createTerm(value));
+if (boost == 1f ||
+(context != null && context.indexVersionCreated().before(Version.V_5_0_0))) {
+return query;
+}
+return new BoostQuery(query, boost);
}
public Query termsQuery(List values, @Nullable QueryShardContext context) {


@@ -321,16 +321,6 @@ public abstract class ParseContext {
return in.externalValue();
}
-@Override
-public float docBoost() {
-return in.docBoost();
-}
-@Override
-public void docBoost(float docBoost) {
-in.docBoost(docBoost);
-}
@Override
public StringBuilder stringBuilder() {
return in.stringBuilder();
@@ -375,8 +365,6 @@ public abstract class ParseContext {
private AllEntries allEntries = new AllEntries();
-private float docBoost = 1.0f;
private Mapper dynamicMappingsUpdate = null;
public InternalParseContext(@Nullable Settings indexSettings, DocumentMapperParser docMapperParser, DocumentMapper docMapper, ContentPath path) {
@@ -402,7 +390,6 @@ public abstract class ParseContext {
this.source = source == null ? null : sourceToParse.source();
this.path.reset();
this.allEntries = new AllEntries();
-this.docBoost = 1.0f;
this.dynamicMappingsUpdate = null;
}
@@ -534,16 +521,6 @@ public abstract class ParseContext {
return this.allEntries;
}
-@Override
-public float docBoost() {
-return this.docBoost;
-}
-@Override
-public void docBoost(float docBoost) {
-this.docBoost = docBoost;
-}
/**
* A string builder that can be used to construct complex names for example.
* Its better to reuse the.
@@ -759,10 +736,6 @@ public abstract class ParseContext {
return clazz.cast(externalValue());
}
-public abstract float docBoost();
-public abstract void docBoost(float docBoost);
/**
* A string builder that can be used to construct complex names for example.
* Its better to reuse the.


@@ -285,7 +285,9 @@ public class ByteFieldMapper extends NumberFieldMapper {
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomByteNumericField field = new CustomByteNumericField(value, fieldType());
-field.setBoost(boost);
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(boost);
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {
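
The same guard recurs in the date, double, float, integer, long, short, string, geo_shape and ip mappers below: `setBoost` is now only called for a non-default boost on an index created before 5.0.0. As a standalone sketch of that guard (the helper name and the boolean parameter are invented here; each mapper inlines the check with `Version.indexCreated(context.indexSettings())`):

import org.apache.lucene.document.Field;

// Illustrative helper only -- each mapper touched by this commit inlines this check.
final class IndexTimeBoostGuard {
    private IndexTimeBoostGuard() {}

    // Apply the legacy index-time boost only for pre-5.0 indices; on newer indices
    // the mapping boost is handled at query time (see MappedFieldType#termQuery above).
    static void maybeApplyIndexTimeBoost(Field field, float boost, boolean indexCreatedBefore5x) {
        if (boost != 1f && indexCreatedBefore5x) {
            field.setBoost(boost);
        }
    }
}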


@@ -513,7 +513,9 @@ public class DateFieldMapper extends NumberFieldMapper {
if (value != null) {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
-field.setBoost(boost);
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(boost);
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -278,7 +278,9 @@ public class DoubleFieldMapper extends NumberFieldMapper {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomDoubleNumericField field = new CustomDoubleNumericField(value, fieldType());
-field.setBoost(boost);
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(boost);
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -290,7 +290,9 @@ public class FloatFieldMapper extends NumberFieldMapper {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomFloatNumericField field = new CustomFloatNumericField(value, fieldType());
-field.setBoost(boost);
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(boost);
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -298,7 +298,9 @@ public class IntegerFieldMapper extends NumberFieldMapper {
protected void addIntegerFields(ParseContext context, List<Field> fields, int value, float boost) {
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomIntegerNumericField field = new CustomIntegerNumericField(value, fieldType());
-field.setBoost(boost);
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(boost);
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -282,7 +282,9 @@ public class LongFieldMapper extends NumberFieldMapper {
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
-field.setBoost(boost);
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(boost);
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -290,7 +290,9 @@ public class ShortFieldMapper extends NumberFieldMapper {
}
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomShortNumericField field = new CustomShortNumericField(value, fieldType());
-field.setBoost(boost);
+if (boost != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(boost);
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -317,7 +317,9 @@ public class StringFieldMapper extends FieldMapper implements AllFieldMapper.Inc
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
Field field = new Field(fieldType().name(), valueAndBoost.value(), fieldType());
-field.setBoost(valueAndBoost.boost());
+if (valueAndBoost.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(valueAndBoost.boost());
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -30,6 +30,7 @@ import org.apache.lucene.spatial.prefix.tree.GeohashPrefixTree;
import org.apache.lucene.spatial.prefix.tree.PackedQuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.QuadPrefixTree;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
+import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.geo.GeoUtils;
@@ -452,7 +453,8 @@ public class GeoShapeFieldMapper extends FieldMapper {
return null;
}
for (Field field : fields) {
-if (!customBoost()) {
+if (!customBoost() &&
+fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
field.setBoost(fieldType().boost());
}
context.doc().add(field);


@@ -27,6 +27,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.apache.lucene.util.NumericUtils;
+import org.elasticsearch.Version;
import org.elasticsearch.common.Explicit;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Numbers;
@@ -305,7 +306,9 @@ public class IpFieldMapper extends NumberFieldMapper {
final long value = ipToLong(ipAsString);
if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) {
CustomLongNumericField field = new CustomLongNumericField(value, fieldType());
-field.setBoost(fieldType().boost());
+if (fieldType.boost() != 1f && Version.indexCreated(context.indexSettings()).before(Version.V_5_0_0)) {
+field.setBoost(fieldType().boost());
+}
fields.add(field);
}
if (fieldType().hasDocValues()) {


@@ -19,13 +19,18 @@
package org.elasticsearch.index.mapper.boost;
+import org.apache.lucene.search.BoostQuery;
+import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
+import org.elasticsearch.index.IndexService;
+import org.elasticsearch.index.mapper.DocumentFieldMappers;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
@@ -33,6 +38,7 @@ import org.elasticsearch.test.InternalSettingsPlugin;
import java.util.Collection;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.instanceOf;
public class CustomBoostMappingTests extends ESSingleNodeTestCase {
@@ -77,4 +83,87 @@ public class CustomBoostMappingTests extends ESSingleNodeTestCase {
assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f));
assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f));
}
public void testBackCompatFieldMappingBoostValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type").startObject("properties")
.startObject("s_field").field("type", "keyword").field("boost", 2.0f).endObject()
.startObject("l_field").field("type", "long").field("boost", 3.0f).startObject("norms").field("enabled", true).endObject().endObject()
.startObject("i_field").field("type", "integer").field("boost", 4.0f).startObject("norms").field("enabled", true).endObject().endObject()
.startObject("sh_field").field("type", "short").field("boost", 5.0f).startObject("norms").field("enabled", true).endObject().endObject()
.startObject("b_field").field("type", "byte").field("boost", 6.0f).startObject("norms").field("enabled", true).endObject().endObject()
.startObject("d_field").field("type", "double").field("boost", 7.0f).startObject("norms").field("enabled", true).endObject().endObject()
.startObject("f_field").field("type", "float").field("boost", 8.0f).startObject("norms").field("enabled", true).endObject().endObject()
.startObject("date_field").field("type", "date").field("boost", 9.0f).startObject("norms").field("enabled", true).endObject().endObject()
.endObject().endObject().endObject().string();
{
IndexService indexService = createIndex("test", BW_SETTINGS);
QueryShardContext context = indexService.newQueryShardContext();
DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentFieldMappers fieldMappers = mapper.mappers();
assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
assertThat(fieldMappers.getMapper("l_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
assertThat(fieldMappers.getMapper("i_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
assertThat(fieldMappers.getMapper("sh_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
assertThat(fieldMappers.getMapper("b_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
assertThat(fieldMappers.getMapper("d_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
assertThat(fieldMappers.getMapper("f_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
assertThat(fieldMappers.getMapper("date_field").fieldType().termQuery("0", context), instanceOf(TermQuery.class));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
.field("s_field", "s_value")
.field("l_field", 1L)
.field("i_field", 1)
.field("sh_field", 1)
.field("b_field", 1)
.field("d_field", 1)
.field("f_field", 1)
.field("date_field", "20100101")
.endObject().bytes());
assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(2.0f));
assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(3.0f));
assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(4.0f));
assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(5.0f));
assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(6.0f));
assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(7.0f));
assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(8.0f));
assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(9.0f));
}
{
IndexService indexService = createIndex("text");
QueryShardContext context = indexService.newQueryShardContext();
DocumentMapper mapper = indexService.mapperService().documentMapperParser().parse("type", new CompressedXContent(mapping));
DocumentFieldMappers fieldMappers = mapper.mappers();
assertThat(fieldMappers.getMapper("s_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
assertThat(fieldMappers.getMapper("l_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
assertThat(fieldMappers.getMapper("i_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
assertThat(fieldMappers.getMapper("sh_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
assertThat(fieldMappers.getMapper("b_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
assertThat(fieldMappers.getMapper("d_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
assertThat(fieldMappers.getMapper("f_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
assertThat(fieldMappers.getMapper("date_field").fieldType().termQuery("0", context), instanceOf(BoostQuery.class));
ParsedDocument doc = mapper.parse("test", "type", "1", XContentFactory.jsonBuilder().startObject()
.field("s_field", "s_value")
.field("l_field", 1L)
.field("i_field", 1)
.field("sh_field", 1)
.field("b_field", 1)
.field("d_field", 1)
.field("f_field", 1)
.field("date_field", "20100101")
.endObject().bytes());
assertThat(doc.rootDoc().getField("s_field").boost(), equalTo(1f));
assertThat(doc.rootDoc().getField("l_field").boost(), equalTo(1f));
assertThat(doc.rootDoc().getField("i_field").boost(), equalTo(1f));
assertThat(doc.rootDoc().getField("sh_field").boost(), equalTo(1f));
assertThat(doc.rootDoc().getField("b_field").boost(), equalTo(1f));
assertThat(doc.rootDoc().getField("d_field").boost(), equalTo(1f));
assertThat(doc.rootDoc().getField("f_field").boost(), equalTo(1f));
assertThat(doc.rootDoc().getField("date_field").boost(), equalTo(1f));
}
}
}


@@ -36,6 +36,7 @@ import org.elasticsearch.test.InternalSettingsPlugin;
import java.util.Collection;
import static org.hamcrest.Matchers.closeTo;
+import static org.hamcrest.Matchers.equalTo;
/**
 */
@@ -98,6 +99,97 @@ public class FieldLevelBoostTests extends ESSingleNodeTestCase {
assertThat((double) f.boost(), closeTo(9.0, 0.001));
}
public void testBackCompatFieldLevelMappingBoost() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
.startObject("str_field").field("type", "keyword").field("boost", "2.0").endObject()
.startObject("int_field").field("type", "integer").field("boost", "3.0").startObject("norms").field("enabled", true).endObject().endObject()
.startObject("byte_field").field("type", "byte").field("boost", "4.0").startObject("norms").field("enabled", true).endObject().endObject()
.startObject("date_field").field("type", "date").field("boost", "5.0").startObject("norms").field("enabled", true).endObject().endObject()
.startObject("double_field").field("type", "double").field("boost", "6.0").startObject("norms").field("enabled", true).endObject().endObject()
.startObject("float_field").field("type", "float").field("boost", "7.0").startObject("norms").field("enabled", true).endObject().endObject()
.startObject("long_field").field("type", "long").field("boost", "8.0").startObject("norms").field("enabled", true).endObject().endObject()
.startObject("short_field").field("type", "short").field("boost", "9.0").startObject("norms").field("enabled", true).endObject().endObject()
.string();
{
DocumentMapper docMapper = createIndex("test", BW_SETTINGS).mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
BytesReference json = XContentFactory.jsonBuilder().startObject()
.field("str_field", "some name")
.field("int_field", 10)
.field("byte_field", 20)
.field("date_field", "2012-01-10")
.field("double_field", 30.0)
.field("float_field", 40.0)
.field("long_field", 50)
.field("short_field", 60)
.bytes();
Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
IndexableField f = doc.getField("str_field");
assertThat((double) f.boost(), closeTo(2.0, 0.001));
f = doc.getField("int_field");
assertThat((double) f.boost(), closeTo(3.0, 0.001));
f = doc.getField("byte_field");
assertThat((double) f.boost(), closeTo(4.0, 0.001));
f = doc.getField("date_field");
assertThat((double) f.boost(), closeTo(5.0, 0.001));
f = doc.getField("double_field");
assertThat((double) f.boost(), closeTo(6.0, 0.001));
f = doc.getField("float_field");
assertThat((double) f.boost(), closeTo(7.0, 0.001));
f = doc.getField("long_field");
assertThat((double) f.boost(), closeTo(8.0, 0.001));
f = doc.getField("short_field");
assertThat((double) f.boost(), closeTo(9.0, 0.001));
}
{
DocumentMapper docMapper = createIndex("test2").mapperService().documentMapperParser().parse("person", new CompressedXContent(mapping));
BytesReference json = XContentFactory.jsonBuilder().startObject()
.field("str_field", "some name")
.field("int_field", 10)
.field("byte_field", 20)
.field("date_field", "2012-01-10")
.field("double_field", 30.0)
.field("float_field", 40.0)
.field("long_field", 50)
.field("short_field", 60)
.bytes();
Document doc = docMapper.parse("test", "person", "1", json).rootDoc();
IndexableField f = doc.getField("str_field");
assertThat(f.boost(), equalTo(1f));
f = doc.getField("int_field");
assertThat(f.boost(), equalTo(1f));
f = doc.getField("byte_field");
assertThat(f.boost(), equalTo(1f));
f = doc.getField("date_field");
assertThat(f.boost(), equalTo(1f));
f = doc.getField("double_field");
assertThat(f.boost(), equalTo(1f));
f = doc.getField("float_field");
assertThat(f.boost(), equalTo(1f));
f = doc.getField("long_field");
assertThat(f.boost(), equalTo(1f));
f = doc.getField("short_field");
assertThat(f.boost(), equalTo(1f));
}
}
public void testBackCompatInvalidFieldLevelBoost() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("person").startObject("properties")
.startObject("str_field").field("type", "string").endObject()


@@ -161,7 +161,7 @@ Individual fields can be included or excluded from the `_all` field with the
[[all-field-and-boosting]]
==== Index boosting and the `_all` field
-Individual fields can be _boosted_ at index time, with the <<index-boost,`boost`>>
+Individual fields can be _boosted_ at index time, with the <<mapping-boost,`boost`>>
parameter. The `_all` field takes these boosts into account:
[source,js]


@@ -8,7 +8,7 @@ parameters that are used by <<mapping-types,field mappings>>:
The following mapping parameters are common to some or all field datatypes:
* <<analyzer,`analyzer`>>
-* <<index-boost,`boost`>>
+* <<mapping-boost,`boost`>>
* <<coerce,`coerce`>>
* <<copy-to,`copy_to`>>
* <<doc-values,`doc_values`>>


@@ -1,8 +1,8 @@
-[[index-boost]]
+[[mapping-boost]]
=== `boost`
-Individual fields can be _boosted_ -- count more towards the relevance score
--- at index time, with the `boost` parameter as follows:
+Individual fields can be _boosted_ automatically -- count more towards the relevance score
+-- at query time, with the `boost` parameter as follows:
[source,js]
--------------------------------------------------
@@ -28,10 +28,45 @@ PUT my_index
<1> Matches on the `title` field will have twice the weight as those on the
`content` field, which has the default `boost` of `1.0`.
-Note that a `title` field will usually be shorter than a `content` field. The
-default relevance calculation takes field length into account, so a short
-`title` field will have a higher natural boost than a long `content` field.
+NOTE: The boost is applied only for term queries (prefix, range and fuzzy queries are not _boosted_).
You can achieve the same effect by using the boost parameter directly in the query. For instance, the following query (which relies on the mapping-level boost of `2` on the `title` field above):
[source,js]
--------------------------------------------------
{
"match" : {
"title": {
"query": "quick brown fox"
}
}
}
--------------------------------------------------
is equivalent to:
[source,js]
--------------------------------------------------
{
"match" : {
"title": {
"query": "quick brown fox",
"boost": 2
}
}
}
--------------------------------------------------
// AUTOSENSE
The boost is also applied when the field value is copied into the
<<mapping-all-field,`_all`>> field. This means that, when
querying the `_all` field, words that originated from the `title` field will
have a higher score than words that originated in the `content` field.
This functionality comes at a cost: queries on the `_all` field are slower
when field boosting is used.
deprecated[5.0.0, index time boost is deprecated. Instead, the field mapping boost is applied at query time. For indices created before 5.0.0 the boost will still be applied at index time.]
[WARNING]
.Why index time boosting is a bad idea
==================================================
@@ -48,12 +83,4 @@ We advise against using index time boosting for the following reasons:
byte. This reduces the resolution of the field length normalization factor
which can lead to lower quality relevance calculations.
==================================================
-The only advantage that index time boosting has is that it is copied with the
-value into the <<mapping-all-field,`_all`>> field. This means that, when
-querying the `_all` field, words that originated from the `title` field will
-have a higher score than words that originated in the `content` field.
-This functionality comes at a cost: queries on the `_all` field are slower
-when index-time boosting is used.


@@ -1,10 +1,8 @@
[[norms]]
=== `norms`
-Norms store various normalization factors -- a number to represent the
-relative field length and the <<index-boost,index time `boost`>> setting
-that are later used at query time in order to compute the score of a document
-relatively to a query.
+Norms store various normalization factors that are later used at query time
+in order to compute the score of a document relatively to a query.
Although useful for scoring, norms also require quite a lot of memory
(typically in the order of one byte per document per field in your index, even


@@ -91,9 +91,9 @@ The following parameters are accepted by `boolean` fields:
[horizontal]
-<<index-boost,`boost`>>::
-Field-level index time boosting. Accepts a floating point number, defaults
+<<mapping-boost,`boost`>>::
+Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::


@@ -90,9 +90,9 @@ The following parameters are accepted by `date` fields:
[horizontal]
-<<index-boost,`boost`>>::
-Field-level index time boosting. Accepts a floating point number, defaults
+<<mapping-boost,`boost`>>::
+Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::


@@ -47,9 +47,9 @@ The following parameters are accepted by `ip` fields:
[horizontal]
-<<index-boost,`boost`>>::
-Field-level index time boosting. Accepts a floating point number, defaults
+<<mapping-boost,`boost`>>::
+Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::


@@ -45,9 +45,9 @@ The following parameters are accepted by numeric types:
Try to convert strings to numbers and truncate fractions for integers.
Accepts `true` (default) and `false`.
-<<index-boost,`boost`>>::
-Field-level index time boosting. Accepts a floating point number, defaults
+<<mapping-boost,`boost`>>::
+Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::


@@ -75,9 +75,9 @@ The following parameters are accepted by `string` fields:
Defaults to the default index analyzer, or the
<<analysis-standard-analyzer,`standard` analyzer>>.
-<<index-boost,`boost`>>::
-Field-level index time boosting. Accepts a floating point number, defaults
+<<mapping-boost,`boost`>>::
+Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::


@@ -68,9 +68,9 @@ The following parameters are accepted by `token_count` fields:
value. Required. For best performance, use an analyzer without token
filters.
-<<index-boost,`boost`>>::
-Field-level index time boosting. Accepts a floating point number, defaults
+<<mapping-boost,`boost`>>::
+Mapping field-level query time boosting. Accepts a floating point number, defaults
to `1.0`.
<<doc-values,`doc_values`>>::


@@ -449,3 +449,8 @@ The docs for the `nested` field datatype have moved to <<nested>>.
Warmers have been removed. There have been significant improvements to the
index that make warmers not necessary anymore.
+[role="exclude",id="index-boost"]
+=== Index time boosting
+The index time boost mapping has been replaced with query time boost (see <<mapping-boost>>).


@@ -165,7 +165,7 @@ include::request/explain.asciidoc[]
include::request/version.asciidoc[]
-include::request/index-boost.asciidoc[]
+include::request/mapping-boost.asciidoc[]
include::request/min-score.asciidoc[]