Multi-value handling in decay functions

Decay functions currently use only the first value of a field that contains multiple values when computing the distance to the origin. Instead, they should compute a distance for every value in the field and then combine those distances with one of min/max/sum/avg, as chosen by the user. Relates to #3960, closes #5940
This commit is contained in:
parent f993945e5c
commit f285ffc610
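The intended combination logic can be sketched as follows. This is an editorial illustration only, not code from this commit; the real implementation goes through Elasticsearch's `MultiValueMode` (see the parser changes below):

[source,java]
--------------------------------------------------
import java.util.Arrays;

// Sketch: combine the per-value distances of a multi-valued field into a
// single distance, the way min/max/sum/avg are meant to behave.
public class MultiValueDistanceSketch {

    enum Mode { MIN, MAX, SUM, AVG }

    static double combine(double[] distances, Mode mode) {
        if (distances.length == 0) {
            return 0.0; // no values: treat the distance as zero
        }
        double sum = Arrays.stream(distances).sum();
        switch (mode) {
            case MIN: return Arrays.stream(distances).min().getAsDouble();
            case MAX: return Arrays.stream(distances).max().getAsDouble();
            case SUM: return sum;
            case AVG: return sum / distances.length;
            default:  throw new IllegalArgumentException("unknown mode " + mode);
        }
    }

    public static void main(String[] args) {
        // field values 0, 1, 2 with origin 0 and offset 0 -> distances 0, 1, 2
        double[] distances = {0.0, 1.0, 2.0};
        System.out.println(combine(distances, Mode.MIN)); // 0.0
        System.out.println(combine(distances, Mode.AVG)); // 1.0
        System.out.println(combine(distances, Mode.SUM)); // 3.0
    }
}
--------------------------------------------------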
@@ -150,6 +150,46 @@ that is initialized with a `seed`.
    }
--------------------------------------------------

===== Field Value factor

The `field_value_factor` function allows you to use a field from a document to
influence the score. It's similar to using the `script_score` function, however,
it avoids the overhead of scripting. If used on a multi-valued field, only the
first value of the field is used in calculations.

As an example, imagine you have a document indexed with a numeric `popularity`
field and wish to influence the score of a document with this field; an example
of doing so would look like:

[source,js]
--------------------------------------------------
"field_value_factor": {
  "field": "popularity",
  "factor": 1.2,
  "modifier": "sqrt"
}
--------------------------------------------------

Which will translate into the following formula for scoring:

`sqrt(1.2 * doc['popularity'].value)`
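
For instance, assuming a hypothetical document whose `popularity` field holds 100, this would evaluate to `sqrt(1.2 * 100) ≈ 10.95`.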

There are a number of options for the `field_value_factor` function:

[cols="<,<",options="header",]
|=======================================================================
| Parameter |Description
|`field` |Field to be extracted from the document.
|`factor` |Optional factor to multiply the field value with, defaults to 1.
|`modifier` |Modifier to apply to the field value, can be one of: `none`, `log`,
`log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or `reciprocal`.
Defaults to `none`.
|=======================================================================

Keep in mind that taking the log() of 0, or the square root of a negative number
is an illegal operation, and an exception will be thrown. Be sure to limit the
values of the field with a range filter to avoid this, or use `log1p` and
`ln1p`.

===== Decay functions

Decay functions score a document with a function that decays depending

@@ -246,45 +286,31 @@ In contrast to the normal and exponential decay, this function actually
sets the score to 0 if the field value exceeds twice the user given
scale value.

===== Field Value factor
The `field_value_factor` function allows you to use a field from a document to
influence the score. It's similar to using the `script_score` function, however,
it avoids the overhead of scripting. If used on a multi-valued field, only the
first value of the field is used in calculations.
===== Multiple values:

As an example, imagine you have a document indexed with a numeric `popularity`
field and wish to influence the score of a document with this field; an example
of doing so would look like:
If a field used for computing the decay contains multiple values, by default the value closest to the origin is chosen for determining the distance.
This can be changed by setting `multi_value_mode`.

[horizontal]
`min`:: Distance is the minimum distance
`max`:: Distance is the maximum distance
`avg`:: Distance is the average distance
`sum`:: Distance is the sum of all distances

Example:

[source,js]
--------------------------------------------------
"field_value_factor": {
  "field": "popularity",
  "factor": 1.2,
  "modifier": "sqrt"
}
"DECAY_FUNCTION": {
    "FIELD_NAME": {
        "origin": ...,
        "scale": ...
    },
    "multi_value_mode": "avg"
}
--------------------------------------------------
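
For reference, the same option is exposed through the Java API in this commit (`setMultiValueMode` on the decay function builders). A minimal sketch, assuming the usual `QueryBuilders`/`ScoreFunctionBuilders` static helpers and a hypothetical `loc` geo field:

[source,java]
--------------------------------------------------
import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.gaussDecayFunction;

import org.elasticsearch.index.query.QueryBuilder;

public class MultiValueModeQuerySketch {

    // Build a function_score query whose gauss decay reduces multi-valued
    // "loc" fields with the average distance instead of the closest one.
    public static QueryBuilder avgDecayQuery() {
        return functionScoreQuery(
                matchAllQuery(),
                gaussDecayFunction("loc", "11,12", "2km").setMultiValueMode("avg"));
    }
}
--------------------------------------------------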

Which will translate into the following formula for scoring:

`sqrt(1.2 * doc['popularity'].value)`

There are a number of options for the `field_value_factor` function:

[cols="<,<",options="header",]
|=======================================================================
| Parameter |Description
|`field` |Field to be extracted from the document.
|`factor` |Optional factor to multiply the field value with, defaults to 1.
|`modifier` |Modifier to apply to the field value, can be one of: `none`, `log`,
`log1p`, `log2p`, `ln`, `ln1p`, `ln2p`, `square`, `sqrt`, or `reciprocal`.
Defaults to `none`.
|=======================================================================

Keep in mind that taking the log() of 0, or the square root of a negative number
is an illegal operation, and an exception will be thrown. Be sure to limit the
values of the field with a range filter to avoid this, or use `log1p` and
`ln1p`.

==== Detailed example

@@ -21,8 +21,10 @@ package org.elasticsearch.index.query.functionscore;

import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;
import java.util.Locale;

public abstract class DecayFunctionBuilder implements ScoreFunctionBuilder {

@@ -36,6 +38,7 @@ public abstract class DecayFunctionBuilder implements ScoreFunctionBuilder {
    private Object scale;
    private double decay = -1;
    private Object offset;
    private MultiValueMode multiValueMode = null;

    public DecayFunctionBuilder(String fieldName, Object origin, Object scale) {
        this.fieldName = fieldName;

@@ -71,8 +74,20 @@ public abstract class DecayFunctionBuilder implements ScoreFunctionBuilder {
            builder.field(OFFSET, offset);
        }
        builder.endObject();
        if (multiValueMode != null) {
            builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name());
        }
        builder.endObject();
        return builder;
    }

    public ScoreFunctionBuilder setMultiValueMode(MultiValueMode multiValueMode) {
        this.multiValueMode = multiValueMode;
        return this;
    }

    public ScoreFunctionBuilder setMultiValueMode(String multiValueMode) {
        this.multiValueMode = MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT));
        return this;
    }
}
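
// Editorial sketch (assumption, not part of this commit): with the serialization
// logic above, a builder configured roughly as
//     new GaussDecayFunctionBuilder("num", 0, 10).setMultiValueMode("avg")
// should emit something like
//     "gauss": { "num": { "origin": 0, "scale": 10 }, "multi_value_mode": "AVG" }
// which matches the DECAY_FUNCTION shape shown in the documentation example.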

@@ -19,11 +19,13 @@

package org.elasticsearch.index.query.functionscore;

import org.apache.lucene.index.AtomicReaderContext;
import org.apache.lucene.search.ComplexExplanation;
import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.geo.GeoDistance;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.geo.GeoUtils;

@@ -31,6 +33,8 @@ import org.elasticsearch.common.lucene.search.function.CombineFunction;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.DoubleValues;
import org.elasticsearch.index.fielddata.GeoPointValues;

@@ -45,9 +49,12 @@ import org.elasticsearch.index.query.QueryParseContext;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder;
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionParser;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.search.aggregations.metrics.MetricsAggregation;
import org.elasticsearch.search.internal.SearchContext;

import java.io.IOException;
import java.util.Locale;

/**
 * This class provides the basic functionality needed for adding a decay

@@ -92,6 +99,8 @@

public abstract class DecayFunctionParser implements ScoreFunctionParser {

    public static final ParseField MULTI_VALUE_MODE = new ParseField("multi_value_mode");

    /**
     * Override this function if you want to produce your own scorer.
     * */

@@ -114,28 +123,34 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
     * */
    @Override
    public ScoreFunction parse(QueryParseContext parseContext, XContentParser parser) throws IOException, QueryParsingException {
        String currentFieldName = null;
        String currentFieldName;
        XContentParser.Token token;
        ScoreFunction scoreFunction = null;
        token = parser.nextToken();
        if (token == XContentParser.Token.FIELD_NAME) {
        AbstractDistanceScoreFunction scoreFunction = null;
        String multiValueMode = "MIN";
        XContentBuilder variableContent = XContentFactory.jsonBuilder();
        String fieldName = null;
        while ((token = parser.nextToken()) == XContentParser.Token.FIELD_NAME) {
            currentFieldName = parser.currentName();
            token = parser.nextToken();
            if (token == XContentParser.Token.START_OBJECT) {
                // parse per field the origin and scale value
                scoreFunction = parseVariable(currentFieldName, parser, parseContext);
                variableContent.copyCurrentStructure(parser);
                fieldName = currentFieldName;
            } else if (MULTI_VALUE_MODE.match(currentFieldName)) {
                multiValueMode = parser.text();
            } else {
                throw new ElasticsearchParseException("Malformed score function score parameters.");
            }
        } else {
        }
        if (fieldName == null) {
            throw new ElasticsearchParseException("Malformed score function score parameters.");
        }
        parser.nextToken();
        XContentParser variableParser = XContentFactory.xContent(variableContent.string()).createParser(variableContent.string());
        scoreFunction = parseVariable(fieldName, variableParser, parseContext, MultiValueMode.fromString(multiValueMode.toUpperCase(Locale.ROOT)));
        return scoreFunction;
    }
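
    // Sketch of the intent (assumption, not in the original source): the loop above
    // buffers each field's origin/scale object into `variableContent` because
    // `multi_value_mode` is a sibling key and may appear before or after that object,
    // e.g. { "FIELD_NAME": { "origin": ..., "scale": ... }, "multi_value_mode": "avg" }.
    // Only after the whole function body has been consumed can parseVariable be
    // called with the resolved MultiValueMode, re-parsing the buffered JSON.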

    // parses origin and scale parameter for field "fieldName"
    private ScoreFunction parseVariable(String fieldName, XContentParser parser, QueryParseContext parseContext) throws IOException {
    private AbstractDistanceScoreFunction parseVariable(String fieldName, XContentParser parser, QueryParseContext parseContext, MultiValueMode mode) throws IOException {

        // now, the field must exist, else we cannot read the value for
        // the doc later

@@ -146,20 +161,21 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {

        FieldMapper<?> mapper = smartMappers.fieldMappers().mapper();
        // dates and time need special handling
        parser.nextToken();
        if (mapper instanceof DateFieldMapper) {
            return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper) mapper);
            return parseDateVariable(fieldName, parser, parseContext, (DateFieldMapper) mapper, mode);
        } else if (mapper instanceof GeoPointFieldMapper) {
            return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper) mapper);
            return parseGeoVariable(fieldName, parser, parseContext, (GeoPointFieldMapper) mapper, mode);
        } else if (mapper instanceof NumberFieldMapper<?>) {
            return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper<?>) mapper);
            return parseNumberVariable(fieldName, parser, parseContext, (NumberFieldMapper<?>) mapper, mode);
        } else {
            throw new QueryParsingException(parseContext.index(), "Field " + fieldName + " is of type " + mapper.fieldType()
                    + ", but only numeric types are supported.");
        }
    }

    private ScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
            NumberFieldMapper<?> mapper) throws IOException {
    private AbstractDistanceScoreFunction parseNumberVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
            NumberFieldMapper<?> mapper, MultiValueMode mode) throws IOException {
        XContentParser.Token token;
        String parameterName = null;
        double scale = 0;

@@ -190,11 +206,11 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
                    + " must be set for numeric fields.");
        }
        IndexNumericFieldData<?> numericFieldData = parseContext.fieldData().getForField(mapper);
        return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData);
        return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
    }

    private ScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
            GeoPointFieldMapper mapper) throws IOException {
    private AbstractDistanceScoreFunction parseGeoVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
            GeoPointFieldMapper mapper, MultiValueMode mode) throws IOException {
        XContentParser.Token token;
        String parameterName = null;
        GeoPoint origin = new GeoPoint();

@@ -222,12 +238,12 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
        double scale = DistanceUnit.DEFAULT.parse(scaleString, DistanceUnit.DEFAULT);
        double offset = DistanceUnit.DEFAULT.parse(offsetString, DistanceUnit.DEFAULT);
        IndexGeoPointFieldData<?> indexFieldData = parseContext.fieldData().getForField(mapper);
        return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData);
        return new GeoFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), indexFieldData, mode);

    }

    private ScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
            DateFieldMapper dateFieldMapper) throws IOException {
    private AbstractDistanceScoreFunction parseDateVariable(String fieldName, XContentParser parser, QueryParseContext parseContext,
            DateFieldMapper dateFieldMapper, MultiValueMode mode) throws IOException {
        XContentParser.Token token;
        String parameterName = null;
        String scaleString = null;

@@ -262,7 +278,7 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
        val = TimeValue.parseTimeValue(offsetString, TimeValue.timeValueHours(24));
        double offset = val.getMillis();
        IndexNumericFieldData<?> numericFieldData = parseContext.fieldData().getForField(dateFieldMapper);
        return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData);
        return new NumericFieldDataScoreFunction(origin, scale, decay, offset, getDecayFunction(), numericFieldData, mode);
    }

    static class GeoFieldDataScoreFunction extends AbstractDistanceScoreFunction {

@@ -274,8 +290,8 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
        private static final GeoDistance distFunction = GeoDistance.DEFAULT;

        public GeoFieldDataScoreFunction(GeoPoint origin, double scale, double decay, double offset, DecayFunction func,
                IndexGeoPointFieldData<?> fieldData) {
            super(scale, decay, offset, func);
                IndexGeoPointFieldData<?> fieldData, MultiValueMode mode) {
            super(scale, decay, offset, func, mode);
            this.origin = origin;
            this.fieldData = fieldData;
        }

@@ -285,28 +301,41 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
            geoPointValues = fieldData.load(context).getGeoPointValues();
        }

        private final GeoPoint getValue(int doc, GeoPoint missing) {
            final int num = geoPointValues.setDocument(doc);
            for (int i = 0; i < num; i++) {
                return geoPointValues.nextValue();
            }
            return missing;
        }

        @Override
        protected double distance(int docId) {
            GeoPoint other = getValue(docId, origin);
            double distance = Math.abs(distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(),
                    DistanceUnit.METERS)) - offset;
            return Math.max(0.0d, distance);
            final int num = geoPointValues.setDocument(docId);
            if (num > 0) {
                double value = mode.startDouble();
                for (int i = 0; i < num; i++) {
                    GeoPoint other = geoPointValues.nextValue();
                    value = mode.apply(Math.max(0.0d, distFunction.calculate(origin.lat(), origin.lon(), other.lat(), other.lon(),
                            DistanceUnit.METERS) - offset), value);
                }
                return mode.reduce(value, num);
            } else {
                return 0.0;
            }
        }
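
        // Sketch of the assumed MultiValueMode contract relied on above (the enum
        // itself is not shown in this diff, so these semantics are an assumption):
        //   MIN: startDouble() = +Infinity, apply(a, b) = min(a, b), reduce(v, n) = v
        //   MAX: startDouble() = -Infinity, apply(a, b) = max(a, b), reduce(v, n) = v
        //   SUM: startDouble() = 0,         apply(a, b) = a + b,     reduce(v, n) = v
        //   AVG: startDouble() = 0,         apply(a, b) = a + b,     reduce(v, n) = v / n
        // so distance(docId) folds all per-value distances into a single value.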

        @Override
        protected String getDistanceString(int docId) {
            final GeoPoint other = getValue(docId, origin);
            return "arcDistance(" + other + "(=doc value), " + origin + "(=origin)) - " + offset
                    + "(=offset) < 0.0 ? 0.0: arcDistance(" + other + "(=doc value), " + origin + "(=origin)) - " + offset
                    + "(=offset)";
            StringBuilder values = new StringBuilder(mode.name());
            values.append(" of: [");
            final int num = geoPointValues.setDocument(docId);
            if (num > 0) {
                for (int i = 0; i < num; i++) {
                    GeoPoint value = geoPointValues.nextValue();
                    values.append("Math.max(arcDistance(");
                    values.append(value).append("(=doc value),").append(origin).append("(=origin)) - ").append(offset).append("(=offset), 0)");
                    if (i != num - 1) {
                        values.append(", ");
                    }
                }
            } else {
                values.append("0.0");
            }
            values.append("]");
            return values.toString();
        }

        @Override

@@ -322,8 +351,8 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
        private DoubleValues doubleValues;

        public NumericFieldDataScoreFunction(double origin, double scale, double decay, double offset, DecayFunction func,
                IndexNumericFieldData<?> fieldData) {
            super(scale, decay, offset, func);
                IndexNumericFieldData<?> fieldData, MultiValueMode mode) {
            super(scale, decay, offset, func, mode);
            this.fieldData = fieldData;
            this.origin = origin;
        }

@@ -332,25 +361,42 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
            this.doubleValues = this.fieldData.load(context).getDoubleValues();
        }

        private final double getValue(int doc, double missing) {
            final int num = doubleValues.setDocument(doc);
            for (int i = 0; i < num; i++) {
                return doubleValues.nextValue();
            }
            return missing;
        }

        @Override
        protected double distance(int docId) {
            double distance = Math.abs(getValue(docId, origin) - origin) - offset;
            return Math.max(0.0d, distance);
            final int num = doubleValues.setDocument(docId);
            if (num > 0) {
                double value = mode.startDouble();
                for (int i = 0; i < num; i++) {
                    final double other = doubleValues.nextValue();
                    value = mode.apply(Math.max(0.0d, Math.abs(other - origin) - offset), value);
                }
                return mode.reduce(value, num);
            } else {
                return 0.0;
            }
        }
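
        // Worked example (editorial sketch, mirroring the integration test added below):
        // for doc values [0.0, 1.0, 2.0] with origin 0 and offset 0 the per-value
        // distances are 0, 1 and 2, so MIN = 0, MAX = 2, SUM = 3 and AVG = 1.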

        @Override
        protected String getDistanceString(int docId) {
            return "Math.abs(" + getValue(docId, origin) + "(=doc value) - " + origin + "(=origin)) - "
                    + offset + "(=offset) < 0.0 ? 0.0: Math.abs(" + getValue(docId, origin) + "(=doc value) - "
                    + origin + ") - " + offset + "(=offset)";

            StringBuilder values = new StringBuilder(mode.name());
            values.append(" of: [");
            final int num = doubleValues.setDocument(docId);
            if (num > 0) {
                for (int i = 0; i < num; i++) {
                    double value = doubleValues.nextValue();
                    values.append("Math.max(Math.abs(");
                    values.append(value).append("(=doc value) - ").append(origin).append("(=origin))) - ").append(offset).append("(=offset), 0)");
                    if (i != num - 1) {
                        values.append(", ");
                    }
                }
            } else {
                values.append("0.0");
            }
            values.append("]");
            return values.toString();

        }

        @Override

@@ -368,9 +414,11 @@ public abstract class DecayFunctionParser implements ScoreFunctionParser {
        private final double scale;
        protected final double offset;
        private final DecayFunction func;
        protected final MultiValueMode mode;

        public AbstractDistanceScoreFunction(double userSuppiedScale, double decay, double offset, DecayFunction func) {
        public AbstractDistanceScoreFunction(double userSuppiedScale, double decay, double offset, DecayFunction func, MultiValueMode mode) {
            super(CombineFunction.MULT);
            this.mode = mode;
            if (userSuppiedScale <= 0.0) {
                throw new ElasticsearchIllegalArgumentException(FunctionScoreQueryParser.NAME + " : scale must be > 0.0.");
            }

@@ -51,7 +51,7 @@ public class ExponentialDecayFunctionParser extends DecayFunctionParser {
    public Explanation explainFunction(String valueExpl, double value, double scale) {
        ComplexExplanation ce = new ComplexExplanation();
        ce.setValue((float) evaluate(value, scale));
        ce.setDescription("exp(- abs(" + valueExpl + ") * " + -1 * scale + ")");
        ce.setDescription("exp(- " + valueExpl + " * " + -1 * scale + ")");
        return ce;
    }

@@ -51,7 +51,7 @@ public class LinearDecayFunctionParser extends DecayFunctionParser {
    public Explanation explainFunction(String valueExpl, double value, double scale) {
        ComplexExplanation ce = new ComplexExplanation();
        ce.setValue((float) evaluate(value, scale));
        ce.setDescription("max(0.0, ((" + scale + " - abs(" + valueExpl + "))/" + scale + ")");
        ce.setDescription("max(0.0, ((" + scale + " - " + valueExpl + ")/" + scale + ")");
        return ce;
    }

@@ -703,4 +703,100 @@ public class DecayFunctionScoreTests extends ElasticsearchIntegrationTest {
        response.actionGet();
    }

    @Test
    public void testMultiFieldOptions() throws Exception {
        assertAcked(prepareCreate("test").addMapping(
                "type1",
                jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
                        .endObject().startObject("loc").field("type", "geo_point").endObject().startObject("num").field("type", "float").endObject().endObject().endObject().endObject()));
        ensureYellow();

        // Index for testing MIN and MAX
        IndexRequestBuilder doc1 = client().prepareIndex()
                .setType("type1")
                .setId("1")
                .setIndex("test")
                .setSource(
                        jsonBuilder().startObject().field("test", "value").startArray("loc").startObject().field("lat", 10).field("lon", 20).endObject().startObject().field("lat", 12).field("lon", 23).endObject().endArray()
                                .endObject());
        IndexRequestBuilder doc2 = client().prepareIndex()
                .setType("type1")
                .setId("2")
                .setIndex("test")
                .setSource(
                        jsonBuilder().startObject().field("test", "value").startObject("loc").field("lat", 11).field("lon", 22).endObject()
                                .endObject());

        indexRandom(true, doc1, doc2);

        ActionFuture<SearchResponse> response = client().search(
                searchRequest().source(
                        searchSource().query(constantScoreQuery(termQuery("test", "value")))));
        SearchResponse sr = response.actionGet();
        assertSearchHits(sr, "1", "2");
        SearchHits sh = sr.getHits();
        assertThat(sh.getTotalHits(), equalTo((long) (2)));

        List<Float> lonlat = new ArrayList<>();
        lonlat.add(20f);
        lonlat.add(10f);
        response = client().search(
                searchRequest().source(
                        searchSource().query(
                                functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode("min")))));
        sr = response.actionGet();
        assertSearchHits(sr, "1", "2");
        sh = sr.getHits();

        assertThat(sh.getAt(0).getId(), equalTo("1"));
        assertThat(sh.getAt(1).getId(), equalTo("2"));
        response = client().search(
                searchRequest().source(
                        searchSource().query(
                                functionScoreQuery(constantScoreQuery(termQuery("test", "value")), gaussDecayFunction("loc", lonlat, "1000km").setMultiValueMode("max")))));
        sr = response.actionGet();
        assertSearchHits(sr, "1", "2");
        sh = sr.getHits();

        assertThat(sh.getAt(0).getId(), equalTo("2"));
        assertThat(sh.getAt(1).getId(), equalTo("1"));
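
        // Editorial sketch of why these orderings are expected (not part of the original test):
        // the origin is lat 10 / lon 20, doc "1" has points at (10, 20) and (12, 23) while
        // doc "2" has a single point at (11, 22). With "min", doc "1"'s closest point sits
        // exactly on the origin (distance 0), so it should score higher; with "max", doc "1"
        // is judged by its farthest point, which is farther away than doc "2"'s point, so
        // doc "2" should come first.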

        // Now test AVG and SUM

        doc1 = client().prepareIndex()
                .setType("type1")
                .setId("1")
                .setIndex("test")
                .setSource(
                        jsonBuilder().startObject().field("test", "value").startArray("num").value(0.0).value(1.0).value(2.0).endArray()
                                .endObject());
        doc2 = client().prepareIndex()
                .setType("type1")
                .setId("2")
                .setIndex("test")
                .setSource(
                        jsonBuilder().startObject().field("test", "value").field("num", 1.0)
                                .endObject());

        indexRandom(true, doc1, doc2);
        response = client().search(
                searchRequest().source(
                        searchSource().query(
                                functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode("sum")))));
        sr = response.actionGet();
        assertSearchHits(sr, "1", "2");
        sh = sr.getHits();

        assertThat(sh.getAt(0).getId(), equalTo("2"));
        assertThat(sh.getAt(1).getId(), equalTo("1"));
        assertThat((double) (1.0 - sh.getAt(0).getScore()), closeTo((double) ((1.0 - sh.getAt(1).getScore()) / 3.0), 1.e-6d));
        response = client().search(
                searchRequest().source(
                        searchSource().query(
                                functionScoreQuery(constantScoreQuery(termQuery("test", "value")), linearDecayFunction("num", "0", "10").setMultiValueMode("avg")))));
        sr = response.actionGet();
        assertSearchHits(sr, "1", "2");
        sh = sr.getHits();
        assertThat((double) (sh.getAt(0).getScore()), closeTo((double) (sh.getAt(1).getScore()), 1.e-6d));
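
        // Editorial sketch of the expected arithmetic (assuming linear decay lowers the
        // score proportionally to the combined distance): doc "1" has values 0, 1, 2 and
        // doc "2" the single value 1, so with origin 0 the summed distances are 3 and 1.
        // Hence (1 - score) for doc "1" should be three times (1 - score) for doc "2",
        // which is what the closeTo assertion above checks; with "avg" both documents
        // have an average distance of 1 and therefore equal scores.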
    }
}