convert explain function score tests to unit tests

Britta Weber 2015-10-05 22:56:19 +02:00
parent ea0c35046b
commit 0915adaa71
6 changed files with 372 additions and 132 deletions

View File

@@ -27,7 +27,7 @@ import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder;
public class ExponentialDecayFunctionBuilder extends DecayFunctionBuilder<ExponentialDecayFunctionBuilder> {
- private static final DecayFunction EXP_DECAY_FUNCTION = new ExponentialDecayScoreFunction();
+ public static final DecayFunction EXP_DECAY_FUNCTION = new ExponentialDecayScoreFunction();
public ExponentialDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset) {
super(fieldName, origin, scale, offset);

View File

@@ -27,7 +27,7 @@ import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder;
public class GaussDecayFunctionBuilder extends DecayFunctionBuilder<GaussDecayFunctionBuilder> {
- private static final DecayFunction GAUSS_DECAY_FUNCTION = new GaussScoreFunction();
+ public static final DecayFunction GAUSS_DECAY_FUNCTION = new GaussScoreFunction();
public GaussDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset) {
super(fieldName, origin, scale, offset);

View File

@@ -26,7 +26,7 @@ import org.elasticsearch.index.query.functionscore.DecayFunctionBuilder;
public class LinearDecayFunctionBuilder extends DecayFunctionBuilder<LinearDecayFunctionBuilder> {
- private static final DecayFunction LINEAR_DECAY_FUNCTION = new LinearDecayScoreFunction();
+ public static final DecayFunction LINEAR_DECAY_FUNCTION = new LinearDecayScoreFunction();
public LinearDecayFunctionBuilder(String fieldName, Object origin, Object scale, Object offset) {
super(fieldName, origin, scale, offset);

View File

@@ -0,0 +1,366 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.query.functionscore;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.FieldType;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.*;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.Weight;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.lucene.search.function.*;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.fielddata.*;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.functionscore.exp.ExponentialDecayFunctionBuilder;
import org.elasticsearch.index.query.functionscore.gauss.GaussDecayFunctionBuilder;
import org.elasticsearch.index.query.functionscore.lin.LinearDecayFunctionBuilder;
import org.elasticsearch.search.MultiValueMode;
import org.elasticsearch.test.ESTestCase;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.Collection;
import static org.hamcrest.core.IsEqual.equalTo;
public class FunctionScoreTests extends ESTestCase {
private static final String UNSUPPORTED = "Method not implemented. This is just a stub for testing.";
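// Stub IndexFieldData used by RandomScoreFunction below: every document exposes a single
// bytes value "0"; the methods the tests do not exercise throw UnsupportedOperationException.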
class IndexFieldDataStub implements IndexFieldData<AtomicFieldData> {
@Override
public MappedFieldType.Names getFieldNames() {
return new MappedFieldType.Names("test");
}
@Override
public FieldDataType getFieldDataType() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public AtomicFieldData load(LeafReaderContext context) {
return new AtomicFieldData() {
@Override
public ScriptDocValues getScriptValues() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public SortedBinaryDocValues getBytesValues() {
return new SortedBinaryDocValues() {
@Override
public void setDocument(int docId) {
}
@Override
public int count() {
return 1;
}
@Override
public BytesRef valueAt(int index) {
return new BytesRef("0");
}
};
}
@Override
public long ramBytesUsed() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public Collection<Accountable> getChildResources() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void close() {
}
};
}
@Override
public AtomicFieldData loadDirect(LeafReaderContext context) throws Exception {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public IndexFieldData.XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, IndexFieldData.XFieldComparatorSource.Nested nested) {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void clear() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void clear(IndexReader reader) {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public Index index() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
}
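// Numeric counterpart used by the decay score functions: every document has a single double value 1.0.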
class IndexNumericFieldDataStub implements IndexNumericFieldData {
@Override
public NumericType getNumericType() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public MappedFieldType.Names getFieldNames() {
return new MappedFieldType.Names("test");
}
@Override
public FieldDataType getFieldDataType() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public AtomicNumericFieldData load(LeafReaderContext context) {
return new AtomicNumericFieldData() {
@Override
public SortedNumericDocValues getLongValues() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public SortedNumericDoubleValues getDoubleValues() {
return new SortedNumericDoubleValues() {
@Override
public void setDocument(int doc) {
}
@Override
public double valueAt(int index) {
return 1;
}
@Override
public int count() {
return 1;
}
};
}
@Override
public ScriptDocValues getScriptValues() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public SortedBinaryDocValues getBytesValues() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public long ramBytesUsed() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public Collection<Accountable> getChildResources() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void close() {
}
};
}
@Override
public AtomicNumericFieldData loadDirect(LeafReaderContext context) throws Exception {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public XFieldComparatorSource comparatorSource(@Nullable Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested) {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void clear() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public void clear(IndexReader reader) {
throw new UnsupportedOperationException(UNSUPPORTED);
}
@Override
public Index index() {
throw new UnsupportedOperationException(UNSUPPORTED);
}
}
private static final String TEXT = "The way out is through.";
private static final String FIELD = "test";
private static final Term TERM = new Term(FIELD, "through");
private Directory dir;
private IndexWriter w;
private DirectoryReader reader;
private IndexSearcher searcher;
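// A single document containing TEXT is indexed in initSearcher(), so every explanation below is checked against doc 0.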
@Before
public void initSearcher() throws IOException {
dir = newDirectory();
w = new IndexWriter(dir, newIndexWriterConfig(new StandardAnalyzer()));
FieldType ft = new FieldType(TextField.TYPE_STORED);
ft.freeze();
Document d = new Document();
d.add(new TextField(FIELD, TEXT, Field.Store.YES));
d.add(new TextField("_uid", "1", Field.Store.YES));
w.addDocument(d);
w.commit();
reader = DirectoryReader.open(w, true);
searcher = newSearcher(reader);
}
@After
public void closeAllTheReaders() throws IOException {
reader.close();
w.close();
dir.close();
}
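// Checks the explanation produced by FunctionScoreQuery for the random, field value factor, gauss, exp and linear decay functions.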
@Test
public void testExplainFunctionScoreQuery() throws IOException {
Explanation functionExplanation = getFunctionScoreExplanation(searcher, new RandomScoreFunction(0, 0, new IndexFieldDataStub()));
checkFunctionScoreExplanation(functionExplanation, "random score function (seed: 0)");
assertThat(functionExplanation.getDetails()[0].getDetails().length, equalTo(0));
functionExplanation = getFunctionScoreExplanation(searcher, new FieldValueFactorFunction("test", 1, FieldValueFactorFunction.Modifier.LN, new Double(1), null));
checkFunctionScoreExplanation(functionExplanation, "field value function: ln(doc['test'].value?:1.0 * factor=1.0)");
assertThat(functionExplanation.getDetails()[0].getDetails().length, equalTo(0));
functionExplanation = getFunctionScoreExplanation(searcher, new DecayFunctionBuilder.NumericFieldDataScoreFunction(0, 1, 0.1, 0, GaussDecayFunctionBuilder.GAUSS_DECAY_FUNCTION, new IndexNumericFieldDataStub(), MultiValueMode.MAX));
checkFunctionScoreExplanation(functionExplanation, "Function for field test:");
assertThat(functionExplanation.getDetails()[0].getDetails()[0].toString(), equalTo("0.1 = exp(-0.5*pow(MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)],2.0)/0.21714724095162594)\n"));
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0));
functionExplanation = getFunctionScoreExplanation(searcher, new DecayFunctionBuilder.NumericFieldDataScoreFunction(0, 1, 0.1, 0, ExponentialDecayFunctionBuilder.EXP_DECAY_FUNCTION, new IndexNumericFieldDataStub(), MultiValueMode.MAX));
checkFunctionScoreExplanation(functionExplanation, "Function for field test:");
assertThat(functionExplanation.getDetails()[0].getDetails()[0].toString(), equalTo("0.1 = exp(- MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)] * 2.3025850929940455)\n"));
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0));
functionExplanation = getFunctionScoreExplanation(searcher, new DecayFunctionBuilder.NumericFieldDataScoreFunction(0, 1, 0.1, 0, LinearDecayFunctionBuilder.LINEAR_DECAY_FUNCTION, new IndexNumericFieldDataStub(), MultiValueMode.MAX));
checkFunctionScoreExplanation(functionExplanation, "Function for field test:");
assertThat(functionExplanation.getDetails()[0].getDetails()[0].toString(), equalTo("0.1 = max(0.0, ((1.1111111111111112 - MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)])/1.1111111111111112)\n"));
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails().length, equalTo(0));
}
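// Wraps the given score function in a FunctionScoreQuery on the test term and returns the function part
// of the explanation for doc 0 (details[1]; details[0] explains the wrapped term query).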
public Explanation getFunctionScoreExplanation(IndexSearcher searcher, ScoreFunction scoreFunction) throws IOException {
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(new TermQuery(TERM), scoreFunction, 0.0f, CombineFunction.AVG, 100);
Weight weight = functionScoreQuery.createWeight(searcher, true);
Explanation explanation = weight.explain(searcher.getIndexReader().leaves().get(0), 0);
return explanation.getDetails()[1];
}
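// The returned node is "min of:" because the function score is capped by maxBoost; its first detail is the function explanation itself.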
public void checkFunctionScoreExplanation(Explanation randomExplanation, String functionExpl) {
assertThat(randomExplanation.getDescription(), equalTo("min of:"));
assertThat(randomExplanation.getDetails()[0].getDescription(), equalTo(functionExpl));
}
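// Same checks for FiltersFunctionScoreQuery: first each function on its own, then all of them combined in one query.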
@Test
public void testExplainFiltersFunctionScoreQuery() throws IOException {
Explanation functionExplanation = getFiltersFunctionScoreExplanation(searcher, new RandomScoreFunction(0, 0, new IndexFieldDataStub()));
checkFiltersFunctionScoreExplanation(functionExplanation, "random score function (seed: 0)", 0);
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails()[1].getDetails().length, equalTo(0));
functionExplanation = getFiltersFunctionScoreExplanation(searcher, new FieldValueFactorFunction("test", 1, FieldValueFactorFunction.Modifier.LN, new Double(1), null));
checkFiltersFunctionScoreExplanation(functionExplanation, "field value function: ln(doc['test'].value?:1.0 * factor=1.0)", 0);
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails()[1].getDetails().length, equalTo(0));
functionExplanation = getFiltersFunctionScoreExplanation(searcher, new DecayFunctionBuilder.NumericFieldDataScoreFunction(0, 1, 0.1, 0, GaussDecayFunctionBuilder.GAUSS_DECAY_FUNCTION, new IndexNumericFieldDataStub(), MultiValueMode.MAX));
checkFiltersFunctionScoreExplanation(functionExplanation, "Function for field test:", 0);
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails()[1].getDetails()[0].toString(), equalTo("0.1 = exp(-0.5*pow(MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)],2.0)/0.21714724095162594)\n"));
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails()[1].getDetails()[0].getDetails().length, equalTo(0));
// now test all together
functionExplanation = getFiltersFunctionScoreExplanation(searcher
, new RandomScoreFunction(0, 0, new IndexFieldDataStub())
, new FieldValueFactorFunction("test", 1, FieldValueFactorFunction.Modifier.LN, new Double(1), null)
, new DecayFunctionBuilder.NumericFieldDataScoreFunction(0, 1, 0.1, 0, GaussDecayFunctionBuilder.GAUSS_DECAY_FUNCTION, new IndexNumericFieldDataStub(), MultiValueMode.MAX)
, new DecayFunctionBuilder.NumericFieldDataScoreFunction(0, 1, 0.1, 0, ExponentialDecayFunctionBuilder.EXP_DECAY_FUNCTION, new IndexNumericFieldDataStub(), MultiValueMode.MAX)
, new DecayFunctionBuilder.NumericFieldDataScoreFunction(0, 1, 0.1, 0, LinearDecayFunctionBuilder.LINEAR_DECAY_FUNCTION, new IndexNumericFieldDataStub(), MultiValueMode.MAX)
);
checkFiltersFunctionScoreExplanation(functionExplanation, "random score function (seed: 0)", 0);
assertThat(functionExplanation.getDetails()[0].getDetails()[0].getDetails()[1].getDetails().length, equalTo(0));
checkFiltersFunctionScoreExplanation(functionExplanation, "field value function: ln(doc['test'].value?:1.0 * factor=1.0)", 1);
assertThat(functionExplanation.getDetails()[0].getDetails()[1].getDetails()[1].getDetails().length, equalTo(0));
checkFiltersFunctionScoreExplanation(functionExplanation, "Function for field test:", 2);
assertThat(functionExplanation.getDetails()[0].getDetails()[2].getDetails()[1].getDetails()[0].toString(), equalTo("0.1 = exp(-0.5*pow(MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)],2.0)/0.21714724095162594)\n"));
assertThat(functionExplanation.getDetails()[0].getDetails()[2].getDetails()[1].getDetails()[0].getDetails().length, equalTo(0));
checkFiltersFunctionScoreExplanation(functionExplanation, "Function for field test:", 3);
assertThat(functionExplanation.getDetails()[0].getDetails()[3].getDetails()[1].getDetails()[0].toString(), equalTo("0.1 = exp(- MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)] * 2.3025850929940455)\n"));
assertThat(functionExplanation.getDetails()[0].getDetails()[3].getDetails()[1].getDetails()[0].getDetails().length, equalTo(0));
checkFiltersFunctionScoreExplanation(functionExplanation, "Function for field test:", 4);
assertThat(functionExplanation.getDetails()[0].getDetails()[4].getDetails()[1].getDetails()[0].toString(), equalTo("0.1 = max(0.0, ((1.1111111111111112 - MAX[Math.max(Math.abs(1.0(=doc value) - 0.0(=origin))) - 0.0(=offset), 0)])/1.1111111111111112)\n"));
assertThat(functionExplanation.getDetails()[0].getDetails()[4].getDetails()[1].getDetails()[0].getDetails().length, equalTo(0));
}
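// Pairs every score function with the same term filter, combines them with score mode AVG, and returns the "min of:" part of the explanation for doc 0.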
public Explanation getFiltersFunctionScoreExplanation(IndexSearcher searcher, ScoreFunction... scoreFunctions) throws IOException {
FiltersFunctionScoreQuery.FilterFunction[] filterFunctions = new FiltersFunctionScoreQuery.FilterFunction[scoreFunctions.length];
for (int i = 0; i < scoreFunctions.length; i++) {
filterFunctions[i] = new FiltersFunctionScoreQuery.FilterFunction(
new TermQuery(TERM), scoreFunctions[i]);
}
FiltersFunctionScoreQuery filtersFunctionScoreQuery = new FiltersFunctionScoreQuery(new TermQuery(TERM), FiltersFunctionScoreQuery.ScoreMode.AVG, filterFunctions, 100, new Float(0.0), CombineFunction.AVG);
Weight weight = filtersFunctionScoreQuery.createWeight(searcher, true);
Explanation explanation = weight.explain(searcher.getIndexReader().leaves().get(0), 0);
return explanation.getDetails()[1];
}
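// Walks the nested explanation: "min of:" -> "function score, score mode [avg]" -> the whichFunction-th
// "function score, product of:" node, whose details are the match filter and the function explanation.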
public void checkFiltersFunctionScoreExplanation(Explanation randomExplanation, String functionExpl, int whichFunction) {
assertThat(randomExplanation.getDescription(), equalTo("min of:"));
assertThat(randomExplanation.getDetails()[0].getDescription(), equalTo("function score, score mode [avg]"));
assertThat(randomExplanation.getDetails()[0].getDetails()[whichFunction].getDescription(), equalTo("function score, product of:"));
assertThat(randomExplanation.getDetails()[0].getDetails()[whichFunction].getDetails()[0].getDescription(), equalTo("match filter: " + FIELD + ":" + TERM.text()));
assertThat(randomExplanation.getDetails()[0].getDetails()[whichFunction].getDetails()[1].getDescription(), equalTo(functionExpl));
}
}

View File

@@ -823,33 +823,4 @@ public class DecayFunctionScoreIT extends ESIntegTestCase {
}
}
@Test
public void testExplainString() throws IOException, ExecutionException, InterruptedException {
assertAcked(prepareCreate("test").addMapping(
"type1",
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
.endObject().startObject("num").field("type", "double").endObject().endObject().endObject().endObject()));
ensureYellow();
client().prepareIndex().setType("type1").setId("1").setIndex("test")
.setSource(jsonBuilder().startObject().field("test", "value").array("num", 0.5, 0.7).endObject()).get();
refresh();
SearchResponse response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().explain(true)
.query(functionScoreQuery(termQuery("test", "value"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction("num", 1.0, 5.0, 1.0)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num", 1.0, 5.0, 1.0)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(exponentialDecayFunction("num", 1.0, 5.0, 1.0))
}).boostMode(CombineFunction.REPLACE)))).get();
String explanation = response.getHits().getAt(0).getExplanation().toString();
assertThat(explanation, containsString(" 1.0 = exp(-0.5*pow(MIN[Math.max(Math.abs(0.5(=doc value) - 1.0(=origin))) - 1.0(=offset), 0), Math.max(Math.abs(0.7(=doc value) - 1.0(=origin))) - 1.0(=offset), 0)],2.0)/18.033688011112044)"));
assertThat(explanation, containsString("1.0 = max(0.0, ((10.0 - MIN[Math.max(Math.abs(0.5(=doc value) - 1.0(=origin))) - 1.0(=offset), 0), Math.max(Math.abs(0.7(=doc value) - 1.0(=origin))) - 1.0(=offset), 0)])/10.0)"));
assertThat(explanation, containsString("1.0 = exp(- MIN[Math.max(Math.abs(0.5(=doc value) - 1.0(=origin))) - 1.0(=offset), 0), Math.max(Math.abs(0.7(=doc value) - 1.0(=origin))) - 1.0(=offset), 0)] * 0.13862943611198905)"));
}
}

View File

@@ -22,7 +22,6 @@ package org.elasticsearch.messy.tests;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
- import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.lucene.search.function.CombineFunction;
@@ -49,24 +48,13 @@ import java.util.concurrent.ExecutionException;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
- import static org.elasticsearch.index.query.QueryBuilders.constantScoreQuery;
- import static org.elasticsearch.index.query.QueryBuilders.functionScoreQuery;
- import static org.elasticsearch.index.query.QueryBuilders.termQuery;
- import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
- import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
- import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.gaussDecayFunction;
- import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.linearDecayFunction;
- import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.randomFunction;
- import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction;
- import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.weightFactorFunction;
+ import static org.elasticsearch.index.query.QueryBuilders.*;
+ import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.*;
import static org.elasticsearch.search.aggregations.AggregationBuilders.terms;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchResponse;
- import static org.hamcrest.Matchers.closeTo;
- import static org.hamcrest.Matchers.equalTo;
- import static org.hamcrest.Matchers.greaterThan;
- import static org.hamcrest.Matchers.is;
+ import static org.hamcrest.Matchers.*;
public class FunctionScoreTests extends ESIntegTestCase {
@@ -82,58 +70,6 @@ public class FunctionScoreTests extends ESIntegTestCase {
protected Collection<Class<? extends Plugin>> nodePlugins() {
return Collections.singleton(GroovyPlugin.class);
}
@Test
public void testExplainQueryOnlyOnce() throws IOException, ExecutionException, InterruptedException {
assertAcked(prepareCreate("test").addMapping(
"type1",
jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "string")
.endObject().startObject("num").field("type", "float").endObject().endObject().endObject().endObject()));
ensureYellow();
client().prepareIndex()
.setType("type1")
.setId("1")
.setIndex("test")
.setSource(
jsonBuilder().startObject().field("test", "value").field("num", 10).endObject()).get();
refresh();
SearchResponse response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().explain(true).query(
functionScoreQuery(termQuery("test", "value"), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction("num", 5, 5)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(exponentialDecayFunction("num", 5, 5)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(linearDecayFunction("num", 5, 5))
})))).get();
String explanation = response.getHits().getAt(0).explanation().toString();
checkQueryExplanationAppearsOnlyOnce(explanation);
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().explain(true).query(
functionScoreQuery(termQuery("test", "value"), fieldValueFactorFunction("num"))))).get();
explanation = response.getHits().getAt(0).explanation().toString();
checkQueryExplanationAppearsOnlyOnce(explanation);
response = client().search(
searchRequest().searchType(SearchType.QUERY_THEN_FETCH).source(
searchSource().explain(true).query(
functionScoreQuery(termQuery("test", "value"), randomFunction(10))))).get();
explanation = response.getHits().getAt(0).explanation().toString();
checkQueryExplanationAppearsOnlyOnce(explanation);
}
private void checkQueryExplanationAppearsOnlyOnce(String explanation) {
// use some substring of the query explanation and see if it appears twice
String queryExplanation = "idf(docFreq=1, maxDocs=1)";
int queryExplanationIndex = explanation.indexOf(queryExplanation, 0);
assertThat(queryExplanationIndex, greaterThan(-1));
queryExplanationIndex = explanation.indexOf(queryExplanation, queryExplanationIndex + 1);
assertThat(queryExplanationIndex, equalTo(-1));
}
static {
XContentBuilder simpleDoc;
@@ -174,39 +110,6 @@ public class FunctionScoreTests extends ESIntegTestCase {
MAPPING_WITH_DOUBLE_AND_GEO_POINT_AND_TEXT_FIELD = mappingWithDoubleAndGeoPointAndTestField;
}
@Test
public void testExplain() throws IOException, ExecutionException, InterruptedException {
assertAcked(prepareCreate(INDEX).addMapping(
TYPE, MAPPING_WITH_DOUBLE_AND_GEO_POINT_AND_TEXT_FIELD
));
ensureYellow();
index(INDEX, TYPE, "1", SIMPLE_DOC);
refresh();
SearchResponse responseWithWeights = client().search(
searchRequest().source(
searchSource().query(
functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), new FunctionScoreQueryBuilder.FilterFunctionBuilder[]{
new FunctionScoreQueryBuilder.FilterFunctionBuilder(gaussDecayFunction(GEO_POINT_FIELD, new GeoPoint(10, 20), "1000km")),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(fieldValueFactorFunction(DOUBLE_FIELD).modifier(FieldValueFactorFunction.Modifier.LN).setWeight(2)),
new FunctionScoreQueryBuilder.FilterFunctionBuilder(scriptFunction(new Script("_index['" + TEXT_FIELD + "']['value'].tf()")).setWeight(3))
})).explain(true))).actionGet();
assertThat(
responseWithWeights.getHits().getAt(0).getExplanation().toString(),
equalTo("6.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 6.0 = min of:\n 6.0 = function score, score mode [multiply]\n 1.0 = function score, product of:\n 1.0 = match filter: *:*\n 1.0 = Function for field geo_point_field:\n 1.0 = exp(-0.5*pow(MIN of: [Math.max(arcDistance([10.0, 20.0](=doc value),[10.0, 20.0](=origin)) - 0.0(=offset), 0)],2.0)/7.213475204444817E11)\n 2.0 = function score, product of:\n 1.0 = match filter: *:*\n 2.0 = product of:\n 1.0 = field value function: ln(doc['double_field'].value * factor=1.0)\n 2.0 = weight\n 3.0 = function score, product of:\n 1.0 = match filter: *:*\n 3.0 = product of:\n 1.0 = script score function, computed with script:\"[script: _index['text_field']['value'].tf(), type: inline, lang: null, params: null]\n 1.0 = _score: \n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 3.0 = weight\n 3.4028235E38 = maxBoost\n"));
responseWithWeights = client().search(
searchRequest().source(
searchSource().query(
functionScoreQuery(constantScoreQuery(termQuery(TEXT_FIELD, "value")), weightFactorFunction(4.0f)))
.explain(true))).actionGet();
assertThat(
responseWithWeights.getHits().getAt(0).getExplanation().toString(),
equalTo("4.0 = function score, product of:\n 1.0 = ConstantScore(text_field:value), product of:\n 1.0 = boost\n 1.0 = queryNorm\n 4.0 = min of:\n 4.0 = product of:\n 1.0 = constant score 1.0 - no function provided\n 4.0 = weight\n 3.4028235E38 = maxBoost\n"));
}
@Test
public void simpleWeightedFunctionsTest() throws IOException, ExecutionException, InterruptedException {
assertAcked(prepareCreate(INDEX).addMapping(