refactor script handling: better support for script languages that benefit from caching state per search (and executing per doc)

kimchy 2010-10-02 17:32:29 +02:00
parent 7a78374398
commit 31ebed11fd
18 changed files with 223 additions and 266 deletions
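
In rough terms, the refactoring moves per-search state out of the individual call sites: consumers used to construct a ScriptFieldsFunction and hand it a params map on every per-document execute() call; they now construct a SearchScript once, bound to the shared per-search ScriptSearchLookup and its params, and only pass the doc id at execution time. A minimal before/after sketch (lang, source, params, reader, doc and context are placeholder names assumed to be in scope, not taken from the diff):

// before this commit (approximate; see the removed FieldsFunction/ScriptFieldsFunction below)
FieldsFunction function = new ScriptFieldsFunction(lang, source, context.scriptService(), context.mapperService(), context.fieldDataCache());
function.setNextReader(reader);
double oldValue = ((Number) function.execute(doc, params)).doubleValue();

// after this commit (approximate; see SearchScript and ScriptSearchLookup below)
SearchScript script = new SearchScript(context.scriptSearchLookup(), lang, source, params, context.scriptService());
script.setNextReader(reader);
double newValue = ((Number) script.execute(doc)).doubleValue();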

View File

@ -1,45 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.field.function;
import org.apache.lucene.index.IndexReader;
import java.util.Map;
/**
* @author kimchy (shay.banon)
*/
public interface FieldsFunction {
void setNextReader(IndexReader reader);
/**
* @param docId The doc id
* @param vars The vars providing additional parameters, should be reused and has values added to it in execute
*/
Object execute(int docId, Map<String, Object> vars);
/**
* @param docId The doc id
* @param vars The vars providing additional parameters, should be reused and has values added to it in execute
* @param sameDocCache If executing against the same doc id several times (possibly with different scripts), pass this across the invocations
*/
Object execute(int docId, Map<String, Object> vars, Map<String, Object> sameDocCache);
}

View File

@ -22,10 +22,9 @@ package org.elasticsearch.index.field.function.sort;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.FieldComparatorSource;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import java.io.IOException;
import java.util.Map;
/**
* @author kimchy (shay.banon)
@ -33,41 +32,35 @@ import java.util.Map;
// LUCENE MONITOR: Monitor against FieldComparator.Double
public class DoubleFieldsFunctionDataComparator extends FieldComparator {
public static FieldComparatorSource comparatorSource(FieldsFunction fieldsFunction, Map<String, Object> params) {
return new InnerSource(fieldsFunction, params);
public static FieldComparatorSource comparatorSource(SearchScript script) {
return new InnerSource(script);
}
private static class InnerSource extends FieldComparatorSource {
private final FieldsFunction fieldsFunction;
private final SearchScript script;
private final Map<String, Object> params;
private InnerSource(FieldsFunction fieldsFunction, Map<String, Object> params) {
this.fieldsFunction = fieldsFunction;
this.params = params;
private InnerSource(SearchScript script) {
this.script = script;
}
@Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
return new DoubleFieldsFunctionDataComparator(numHits, fieldsFunction, params);
return new DoubleFieldsFunctionDataComparator(numHits, script);
}
}
private final FieldsFunction fieldsFunction;
private final Map<String, Object> params;
private final SearchScript script;
private final double[] values;
private double bottom;
public DoubleFieldsFunctionDataComparator(int numHits, FieldsFunction fieldsFunction, Map<String, Object> params) {
this.fieldsFunction = fieldsFunction;
this.params = params;
public DoubleFieldsFunctionDataComparator(int numHits, SearchScript script) {
this.script = script;
values = new double[numHits];
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
fieldsFunction.setNextReader(reader);
script.setNextReader(reader);
}
@Override public int compare(int slot1, int slot2) {
@ -83,7 +76,7 @@ public class DoubleFieldsFunctionDataComparator extends FieldComparator {
}
@Override public int compareBottom(int doc) {
final double v2 = ((Number) fieldsFunction.execute(doc, params)).doubleValue();
final double v2 = ((Number) script.execute(doc)).doubleValue();
if (bottom > v2) {
return 1;
} else if (bottom < v2) {
@ -94,7 +87,7 @@ public class DoubleFieldsFunctionDataComparator extends FieldComparator {
}
@Override public void copy(int slot, int doc) {
values[slot] = ((Number) fieldsFunction.execute(doc, params)).doubleValue();
values[slot] = ((Number) script.execute(doc)).doubleValue();
}
@Override public void setBottom(final int bottom) {

View File

@ -22,52 +22,45 @@ package org.elasticsearch.index.field.function.sort;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.FieldComparator;
import org.apache.lucene.search.FieldComparatorSource;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import java.io.IOException;
import java.util.Map;
/**
* @author kimchy (shay.banon)
*/
public class StringFieldsFunctionDataComparator extends FieldComparator {
public static FieldComparatorSource comparatorSource(FieldsFunction fieldsFunction, Map<String, Object> params) {
return new InnerSource(fieldsFunction, params);
public static FieldComparatorSource comparatorSource(SearchScript script) {
return new InnerSource(script);
}
private static class InnerSource extends FieldComparatorSource {
private final FieldsFunction fieldsFunction;
private final SearchScript script;
private final Map<String, Object> params;
private InnerSource(FieldsFunction fieldsFunction, Map<String, Object> params) {
this.fieldsFunction = fieldsFunction;
this.params = params;
private InnerSource(SearchScript script) {
this.script = script;
}
@Override public FieldComparator newComparator(String fieldname, int numHits, int sortPos, boolean reversed) throws IOException {
return new StringFieldsFunctionDataComparator(numHits, fieldsFunction, params);
return new StringFieldsFunctionDataComparator(numHits, script);
}
}
private final FieldsFunction fieldsFunction;
private final Map<String, Object> params;
private final SearchScript script;
private String[] values;
private String bottom;
public StringFieldsFunctionDataComparator(int numHits, FieldsFunction fieldsFunction, Map<String, Object> params) {
this.fieldsFunction = fieldsFunction;
this.params = params;
public StringFieldsFunctionDataComparator(int numHits, SearchScript script) {
this.script = script;
values = new String[numHits];
}
@Override public void setNextReader(IndexReader reader, int docBase) throws IOException {
fieldsFunction.setNextReader(reader);
script.setNextReader(reader);
}
@Override public int compare(int slot1, int slot2) {
@ -86,7 +79,7 @@ public class StringFieldsFunctionDataComparator extends FieldComparator {
}
@Override public int compareBottom(int doc) {
final String val2 = fieldsFunction.execute(doc, params).toString();
final String val2 = script.execute(doc).toString();
if (bottom == null) {
if (val2 == null) {
return 0;
@ -99,7 +92,7 @@ public class StringFieldsFunctionDataComparator extends FieldComparator {
}
@Override public void copy(int slot, int doc) {
values[slot] = fieldsFunction.execute(doc, params).toString();
values[slot] = script.execute(doc).toString();
}
@Override public void setBottom(final int bottom) {

View File

@ -23,20 +23,19 @@ import org.apache.lucene.index.IndexReader;
import org.apache.lucene.search.Explanation;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.search.function.FunctionScoreQuery;
import org.elasticsearch.common.lucene.search.function.ScoreFunction;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.script.search.SearchScript;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
@ -90,41 +89,31 @@ public class CustomScoreQueryParser extends AbstractIndexComponent implements XC
if (script == null) {
throw new QueryParsingException(index, "[custom_score] requires 'script' field");
}
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(query,
new ScriptScoreFunction(new ScriptFieldsFunction(scriptLang, script, parseContext.scriptService(), parseContext.mapperService(), parseContext.indexCache().fieldData()), vars));
SearchScript searchScript = new SearchScript(scriptLang, script, vars, parseContext.scriptService(), parseContext.mapperService(), parseContext.indexCache().fieldData());
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(query, new ScriptScoreFunction(searchScript));
functionScoreQuery.setBoost(boost);
return functionScoreQuery;
}
private static ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>> cachedVars = new ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>>() {
@Override protected ThreadLocals.CleanableValue<Map<String, Object>> initialValue() {
return new ThreadLocals.CleanableValue<Map<String, Object>>(new HashMap<String, Object>());
}
};
public static class ScriptScoreFunction implements ScoreFunction {
private final ScriptFieldsFunction scriptFieldsFunction;
private final SearchScript script;
private Map<String, Object> vars;
private Map<String, Object> vars = Maps.newHashMapWithExpectedSize(2);
private ScriptScoreFunction(ScriptFieldsFunction scriptFieldsFunction, Map<String, Object> vars) {
this.scriptFieldsFunction = scriptFieldsFunction;
this.vars = vars;
private ScriptScoreFunction(SearchScript script) {
this.script = script;
}
@Override public void setNextReader(IndexReader reader) {
scriptFieldsFunction.setNextReader(reader);
if (vars == null) {
vars = cachedVars.get().get();
vars.clear();
}
script.setNextReader(reader);
}
@Override public float score(int docId, float subQueryScore) {
vars.put("score", subQueryScore);
vars.put("_score", subQueryScore);
return ((Number) scriptFieldsFunction.execute(docId, vars)).floatValue();
return ((Number) script.execute(docId, vars)).floatValue();
}
@Override public Explanation explain(int docId, Explanation subQueryExpl) {

View File

@ -30,11 +30,11 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.AbstractIndexComponent;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.QueryParsingException;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.search.SearchScript;
import java.io.IOException;
import java.util.Map;
@ -148,15 +148,15 @@ public class ScriptFilterParser extends AbstractIndexComponent implements XConte
}
@Override public DocIdSet getDocIdSet(final IndexReader reader) throws IOException {
final ScriptFieldsFunction function = new ScriptFieldsFunction(scriptLang, script, scriptService, mapperService, fieldDataCache);
function.setNextReader(reader);
final SearchScript searchScript = new SearchScript(scriptLang, script, params, scriptService, mapperService, fieldDataCache);
searchScript.setNextReader(reader);
return new GetDocSet(reader.maxDoc()) {
@Override public boolean isCacheable() {
return false; // though it is, we want to cache it into in memory rep so it will be faster
}
@Override public boolean get(int doc) throws IOException {
Object val = function.execute(doc, params);
Object val = searchScript.execute(doc, params);
if (val == null) {
return false;
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.index.field.function.script;
package org.elasticsearch.script.search;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
@ -25,96 +25,69 @@ import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.compress.lzf.LZFDecoder;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldSelector;
import org.elasticsearch.script.CompiledScript;
import org.elasticsearch.script.ScriptService;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* @author kimchy (shay.banon)
*/
public class ScriptFieldsFunction implements FieldsFunction {
private static ThreadLocal<ThreadLocals.CleanableValue<Map<String, FieldData>>> cachedFieldData = new ThreadLocal<ThreadLocals.CleanableValue<Map<String, FieldData>>>() {
@Override protected ThreadLocals.CleanableValue<Map<String, FieldData>> initialValue() {
return new ThreadLocals.CleanableValue<Map<String, FieldData>>(new HashMap<String, FieldData>());
}
};
private static ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>> cachedVars = new ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>>() {
@Override protected ThreadLocals.CleanableValue<Map<String, Object>> initialValue() {
return new ThreadLocals.CleanableValue<java.util.Map<java.lang.String, java.lang.Object>>(new HashMap<String, Object>());
}
};
final ScriptService scriptService;
final CompiledScript script;
public class ScriptSearchLookup {
final DocMap docMap;
final SourceMap sourceMap;
public ScriptFieldsFunction(String scriptLang, String script, ScriptService scriptService, MapperService mapperService, FieldDataCache fieldDataCache) {
this.scriptService = scriptService;
this.script = scriptService.compile(scriptLang, script);
this.docMap = new DocMap(cachedFieldData.get().get(), mapperService, fieldDataCache);
this.sourceMap = new SourceMap();
final Map<String, Object> scriptVars;
public ScriptSearchLookup(MapperService mapperService, FieldDataCache fieldDataCache) {
docMap = new DocMap(mapperService, fieldDataCache);
sourceMap = new SourceMap();
scriptVars = ImmutableMap.<String, Object>of("doc", docMap, "_source", sourceMap);
}
@Override public void setNextReader(IndexReader reader) {
public Map<String, Object> processScriptParams(@Nullable Map<String, Object> params) {
if (params == null) {
return scriptVars;
}
params.put("doc", docMap);
params.put("_source", sourceMap);
return params;
}
public void setNextReader(IndexReader reader) {
docMap.setNextReader(reader);
sourceMap.setNextReader(reader);
}
@Override public Object execute(int docId, Map<String, Object> vars) {
return execute(docId, vars, null);
}
@Override public Object execute(int docId, Map<String, Object> vars, @Nullable Map<String, Object> sameDocCache) {
public void setNextDocId(int docId) {
docMap.setNextDocId(docId);
sourceMap.setNextDocId(docId);
if (sameDocCache != null) {
sourceMap.parsedSource((Map<String, Object>) sameDocCache.get("parsedSource"));
}
if (vars == null) {
vars = cachedVars.get().get();
vars.clear();
}
vars.put("doc", docMap);
vars.put("_source", sourceMap);
Object retVal = scriptService.execute(script, vars);
if (sameDocCache != null) {
sameDocCache.put("parsedSource", sourceMap.parsedSource());
}
return retVal;
}
static class SourceMap implements Map {
private IndexReader reader;
private int docId;
private int docId = -1;
private Map<String, Object> source;
@ -157,11 +130,18 @@ public class ScriptFieldsFunction implements FieldsFunction {
}
public void setNextReader(IndexReader reader) {
if (this.reader == reader) { // if we are called with the same reader, don't invalidate source
return;
}
this.reader = reader;
this.source = null;
this.docId = -1;
}
public void setNextDocId(int docId) {
if (this.docId == docId) { // if we are called with the same docId, don't invalidate source
return;
}
this.docId = docId;
this.source = null;
}
@ -219,7 +199,7 @@ public class ScriptFieldsFunction implements FieldsFunction {
static class DocMap implements Map {
private final Map<String, FieldData> localCacheFieldData;
private final Map<String, FieldData> localCacheFieldData = Maps.newHashMapWithExpectedSize(4);
private final MapperService mapperService;
@ -227,16 +207,19 @@ public class ScriptFieldsFunction implements FieldsFunction {
private IndexReader reader;
private int docId;
private int docId = -1;
DocMap(Map<String, FieldData> localCacheFieldData, MapperService mapperService, FieldDataCache fieldDataCache) {
this.localCacheFieldData = localCacheFieldData;
DocMap(MapperService mapperService, FieldDataCache fieldDataCache) {
this.mapperService = mapperService;
this.fieldDataCache = fieldDataCache;
}
public void setNextReader(IndexReader reader) {
if (this.reader == reader) { // if we are called with the same reader, don't invalidate source
return;
}
this.reader = reader;
this.docId = -1;
localCacheFieldData.clear();
}

View File

@ -0,0 +1,68 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script.search;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService;
import javax.annotation.Nullable;
import java.util.Map;
/**
* @author kimchy (shay.banon)
*/
public class SearchScript {
private final ScriptSearchLookup searchLookup;
private final ExecutableScript script;
public SearchScript(ScriptSearchLookup searchLookup, ExecutableScript script) {
this.searchLookup = searchLookup;
this.script = script;
}
public SearchScript(ScriptSearchLookup searchLookup, String lang, String script, @Nullable Map<String, Object> params, ScriptService scriptService) {
this.searchLookup = searchLookup;
this.script = scriptService.executable(lang, script, searchLookup.processScriptParams(params));
}
public SearchScript(String lang, String script, @Nullable Map<String, Object> params, ScriptService scriptService, MapperService mapperService, FieldDataCache fieldDataCache) {
this.searchLookup = new ScriptSearchLookup(mapperService, fieldDataCache);
this.script = scriptService.executable(lang, script, searchLookup.processScriptParams(params));
}
public void setNextReader(IndexReader reader) {
searchLookup.setNextReader(reader);
}
public Object execute(int docId) {
searchLookup.setNextDocId(docId);
return script.run();
}
public Object execute(int docId, Map params) {
searchLookup.setNextDocId(docId);
return script.run(params);
}
}
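
Worth noting, since it is easy to miss in the diff: every SearchScript built from the same SearchContext shares a single ScriptSearchLookup (see scriptSearchLookup() in the SearchContext change further down), so running several scripts against the same document reuses the lazily parsed _source instead of the explicit sameDocCache map the fetch phase used to pass around. An illustrative sketch only, with placeholder names (subReader, subDoc) and placeholder script sources:

ScriptSearchLookup lookup = context.scriptSearchLookup();   // one shared lookup per search
SearchScript first = new SearchScript(lookup, null, "_source.title", null, context.scriptService());
SearchScript second = new SearchScript(lookup, null, "_source.body", null, context.scriptService());

first.setNextReader(subReader);
second.setNextReader(subReader);    // same reader, so the shared lookup keeps its state

Object title = first.execute(subDoc);   // loads and parses _source for subDoc on first access
Object body = second.execute(subDoc);   // same doc id, so the parsed _source is reused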

View File

@ -22,8 +22,7 @@ package org.elasticsearch.search.facets.geodistance;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.lucene.geo.GeoDistance;
import org.elasticsearch.common.unit.DistanceUnit;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
@ -34,22 +33,19 @@ import java.util.Map;
*/
public class ScriptGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
private final FieldsFunction valueFunction;
private final Map<String, Object> params;
private final SearchScript script;
public ScriptGeoDistanceFacetCollector(String facetName, String fieldName, double lat, double lon, DistanceUnit unit, GeoDistance geoDistance,
GeoDistanceFacet.Entry[] entries, SearchContext context,
String scriptLang, String script, Map<String, Object> params) {
super(facetName, fieldName, lat, lon, unit, geoDistance, entries, context);
this.params = params;
this.valueFunction = new ScriptFieldsFunction(scriptLang, script, context.scriptService(), context.mapperService(), context.fieldDataCache());
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService());
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
super.doSetNextReader(reader, docBase);
valueFunction.setNextReader(reader);
script.setNextReader(reader);
}
@Override protected void doCollect(int doc) throws IOException {
@ -57,7 +53,7 @@ public class ScriptGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
return;
}
double value = ((Number) valueFunction.execute(doc, params)).doubleValue();
double value = ((Number) script.execute(doc)).doubleValue();
if (latFieldData.multiValued()) {
double[] lats = latFieldData.doubleValues(doc);

View File

@ -22,8 +22,7 @@ package org.elasticsearch.search.facets.histogram;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.trove.TLongDoubleHashMap;
import org.elasticsearch.common.trove.TLongLongHashMap;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.support.AbstractFacetCollector;
import org.elasticsearch.search.internal.SearchContext;
@ -36,11 +35,9 @@ import java.util.Map;
*/
public class ScriptHistogramFacetCollector extends AbstractFacetCollector {
private final FieldsFunction keyFunction;
private final SearchScript keyScript;
private final FieldsFunction valueFunction;
private final Map<String, Object> params;
private final SearchScript valueScript;
private final long interval;
@ -52,29 +49,28 @@ public class ScriptHistogramFacetCollector extends AbstractFacetCollector {
public ScriptHistogramFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName);
this.keyFunction = new ScriptFieldsFunction(scriptLang, keyScript, context.scriptService(), context.mapperService(), context.fieldDataCache());
this.valueFunction = new ScriptFieldsFunction(scriptLang, valueScript, context.scriptService(), context.mapperService(), context.fieldDataCache());
this.keyScript = new SearchScript(context.scriptSearchLookup(), scriptLang, keyScript, params, context.scriptService());
this.valueScript = new SearchScript(context.scriptSearchLookup(), scriptLang, valueScript, params, context.scriptService());
this.interval = interval > 0 ? interval : 0;
this.params = params;
this.comparatorType = comparatorType;
}
@Override protected void doCollect(int doc) throws IOException {
Number keyValue = (Number) keyFunction.execute(doc, params);
Number keyValue = (Number) keyScript.execute(doc);
long bucket;
if (interval == 0) {
bucket = keyValue.longValue();
} else {
bucket = bucket(keyValue.doubleValue(), interval);
}
double value = ((Number) valueFunction.execute(doc, params)).doubleValue();
double value = ((Number) valueScript.execute(doc)).doubleValue();
counts.adjustOrPutValue(bucket, 1, 1);
totals.adjustOrPutValue(bucket, value, value);
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
keyFunction.setNextReader(reader);
valueFunction.setNextReader(reader);
keyScript.setNextReader(reader);
valueScript.setNextReader(reader);
}
@Override public Facet facet() {

View File

@ -20,8 +20,7 @@
package org.elasticsearch.search.facets.range;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.support.AbstractFacetCollector;
import org.elasticsearch.search.internal.SearchContext;
@ -34,30 +33,27 @@ import java.util.Map;
*/
public class ScriptRangeFacetCollector extends AbstractFacetCollector {
private final FieldsFunction keyFunction;
private final SearchScript keyScript;
private final FieldsFunction valueFunction;
private final Map<String, Object> params;
private final SearchScript valueScript;
private final RangeFacet.Entry[] entries;
public ScriptRangeFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName);
this.keyFunction = new ScriptFieldsFunction(scriptLang, keyScript, context.scriptService(), context.mapperService(), context.fieldDataCache());
this.valueFunction = new ScriptFieldsFunction(scriptLang, valueScript, context.scriptService(), context.mapperService(), context.fieldDataCache());
this.params = params;
this.keyScript = new SearchScript(context.scriptSearchLookup(), scriptLang, keyScript, params, context.scriptService());
this.valueScript = new SearchScript(context.scriptSearchLookup(), scriptLang, valueScript, params, context.scriptService());
this.entries = entries;
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
keyFunction.setNextReader(reader);
valueFunction.setNextReader(reader);
keyScript.setNextReader(reader);
valueScript.setNextReader(reader);
}
@Override protected void doCollect(int doc) throws IOException {
double key = ((Number) keyFunction.execute(doc, params)).doubleValue();
double value = ((Number) valueFunction.execute(doc, params)).doubleValue();
double key = ((Number) keyScript.execute(doc)).doubleValue();
double value = ((Number) valueScript.execute(doc)).doubleValue();
for (RangeFacet.Entry entry : entries) {
if (key >= entry.getFrom() && key < entry.getTo()) {

View File

@ -20,8 +20,7 @@
package org.elasticsearch.search.facets.statistical;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.support.AbstractFacetCollector;
import org.elasticsearch.search.internal.SearchContext;
@ -34,9 +33,7 @@ import java.util.Map;
*/
public class ScriptStatisticalFacetCollector extends AbstractFacetCollector {
private final FieldsFunction function;
private final Map<String, Object> params;
private final SearchScript script;
private double min = Double.NaN;
@ -50,12 +47,11 @@ public class ScriptStatisticalFacetCollector extends AbstractFacetCollector {
public ScriptStatisticalFacetCollector(String facetName, String scriptLang, String script, Map<String, Object> params, SearchContext context) {
super(facetName);
this.params = params;
this.function = new ScriptFieldsFunction(scriptLang, script, context.scriptService(), context.mapperService(), context.fieldDataCache());
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService());
}
@Override protected void doCollect(int doc) throws IOException {
double value = ((Number) function.execute(doc, params)).doubleValue();
double value = ((Number) script.execute(doc)).doubleValue();
if (value < min || Double.isNaN(min)) {
min = value;
}
@ -68,7 +64,7 @@ public class ScriptStatisticalFacetCollector extends AbstractFacetCollector {
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
function.setNextReader(reader);
script.setNextReader(reader);
}
@Override public Facet facet() {

View File

@ -29,9 +29,8 @@ import org.elasticsearch.common.trove.TObjectIntHashMap;
import org.elasticsearch.common.trove.TObjectIntIterator;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.facets.Facet;
import org.elasticsearch.search.facets.support.AbstractFacetCollector;
import org.elasticsearch.search.internal.SearchContext;
@ -73,7 +72,7 @@ public class TermsFacetCollector extends AbstractFacetCollector {
private final StaticAggregatorValueProc aggregator;
private final FieldsFunction scriptFunction;
private final SearchScript script;
public TermsFacetCollector(String facetName, String fieldName, int size, InternalTermsFacet.ComparatorType comparatorType, SearchContext context,
ImmutableSet<String> excluded, Pattern pattern, String scriptLang, String script, Map<String, Object> params) {
@ -100,26 +99,22 @@ public class TermsFacetCollector extends AbstractFacetCollector {
}
if (script != null) {
scriptFunction = new ScriptFieldsFunction(scriptLang, script, context.scriptService(), context.mapperService(), fieldDataCache);
if (params == null) {
params = Maps.newHashMapWithExpectedSize(1);
}
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService());
} else {
params = null;
scriptFunction = null;
this.script = null;
}
if (excluded.isEmpty() && pattern == null && scriptFunction == null) {
if (excluded.isEmpty() && pattern == null && this.script == null) {
aggregator = new StaticAggregatorValueProc(popFacets());
} else {
aggregator = new AggregatorValueProc(popFacets(), excluded, pattern, this.scriptFunction, params);
aggregator = new AggregatorValueProc(popFacets(), excluded, pattern, this.script);
}
}
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {
fieldData = fieldDataCache.cache(fieldDataType, reader, indexFieldName);
if (scriptFunction != null) {
scriptFunction.setNextReader(reader);
if (script != null) {
script.setNextReader(reader);
}
}
@ -168,17 +163,20 @@ public class TermsFacetCollector extends AbstractFacetCollector {
private final Matcher matcher;
private final FieldsFunction scriptFunction;
private final SearchScript script;
private final Map<String, Object> params;
private final Map<String, Object> scriptParams;
public AggregatorValueProc(TObjectIntHashMap<String> facets, ImmutableSet<String> excluded, Pattern pattern,
FieldsFunction scriptFunction, Map<String, Object> params) {
public AggregatorValueProc(TObjectIntHashMap<String> facets, ImmutableSet<String> excluded, Pattern pattern, SearchScript script) {
super(facets);
this.excluded = excluded;
this.matcher = pattern != null ? pattern.matcher("") : null;
this.scriptFunction = scriptFunction;
this.params = params;
this.script = script;
if (script != null) {
scriptParams = Maps.newHashMapWithExpectedSize(4);
} else {
scriptParams = null;
}
}
@Override public void onValue(int docId, String value) {
@ -188,9 +186,9 @@ public class TermsFacetCollector extends AbstractFacetCollector {
if (matcher != null && !matcher.reset(value).matches()) {
return;
}
if (scriptFunction != null) {
params.put("term", value);
Object scriptValue = scriptFunction.execute(docId, params);
if (script != null) {
scriptParams.put("term", value);
Object scriptValue = script.execute(docId, scriptParams);
if (scriptValue == null) {
return;
}

View File

@ -29,7 +29,6 @@ import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.lucene.docset.DocSet;
import org.elasticsearch.common.thread.ThreadLocals;
import org.elasticsearch.index.mapper.*;
import org.elasticsearch.search.SearchHitField;
import org.elasticsearch.search.SearchParseElement;
@ -53,12 +52,6 @@ import java.util.Map;
*/
public class FetchPhase implements SearchPhase {
private static ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>> cachedSameDocScriptCache = new ThreadLocal<ThreadLocals.CleanableValue<Map<String, Object>>>() {
@Override protected ThreadLocals.CleanableValue<Map<String, Object>> initialValue() {
return new ThreadLocals.CleanableValue<java.util.Map<java.lang.String, java.lang.Object>>(new HashMap<String, Object>());
}
};
private final HighlightPhase highlightPhase;
@Inject public FetchPhase(HighlightPhase highlightPhase) {
@ -82,8 +75,6 @@ public class FetchPhase implements SearchPhase {
public void execute(SearchContext context) {
FieldSelector fieldSelector = buildFieldSelectors(context);
Map<String, Object> sameDocCache = cachedSameDocScriptCache.get().get();
InternalSearchHit[] hits = new InternalSearchHit[context.docIdsToLoadSize()];
for (int index = 0; index < context.docIdsToLoadSize(); index++) {
int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index];
@ -141,14 +132,13 @@ public class FetchPhase implements SearchPhase {
}
if (context.hasScriptFields()) {
sameDocCache.clear();
int readerIndex = context.searcher().readerIndex(docId);
IndexReader subReader = context.searcher().subReaders()[readerIndex];
int subDoc = docId - context.searcher().docStarts()[readerIndex];
for (ScriptFieldsContext.ScriptField scriptField : context.scriptFields().fields()) {
scriptField.scriptFieldsFunction().setNextReader(subReader);
scriptField.script().setNextReader(subReader);
Object value = scriptField.scriptFieldsFunction().execute(subDoc, scriptField.params(), sameDocCache);
Object value = scriptField.script().execute(subDoc);
if (searchHit.fields() == null) {
searchHit.fields(new HashMap<String, SearchHitField>(2));
@ -161,7 +151,6 @@ public class FetchPhase implements SearchPhase {
}
hitField.values().add(value);
}
sameDocCache.clear();
}
if (!context.parsedQuery().namedFilters().isEmpty()) {

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.fetch;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.internal.SearchContext;
@ -39,7 +39,8 @@ public class FieldsParseElement implements SearchParseElement {
String name = parser.text();
if (name.contains("_source.") || name.contains("doc[")) {
// script field to load from source
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, new ScriptFieldsFunction(null, name, context.scriptService(), context.mapperService(), context.fieldDataCache()), null));
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), null, name, null, context.scriptService());
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript));
} else {
context.fieldNames().add(name);
}
@ -51,7 +52,8 @@ public class FieldsParseElement implements SearchParseElement {
String name = parser.text();
if (name.contains("_source.") || name.contains("doc[")) {
// script field to load from source
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, new ScriptFieldsFunction(null, name, context.scriptService(), context.mapperService(), context.fieldDataCache()), null));
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), null, name, null, context.scriptService());
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript));
} else {
context.fieldNames().add(name);
}

View File

@ -20,10 +20,9 @@
package org.elasticsearch.search.fetch.script;
import org.elasticsearch.common.collect.Lists;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import java.util.List;
import java.util.Map;
/**
* @author kimchy (shay.banon)
@ -32,25 +31,19 @@ public class ScriptFieldsContext {
public static class ScriptField {
private final String name;
private final ScriptFieldsFunction scriptFieldsFunction;
private final Map<String, Object> params;
private final SearchScript script;
public ScriptField(String name, ScriptFieldsFunction scriptFieldsFunction, Map<String, Object> params) {
public ScriptField(String name, SearchScript script) {
this.name = name;
this.scriptFieldsFunction = scriptFieldsFunction;
this.params = params;
this.script = script;
}
public String name() {
return name;
}
public ScriptFieldsFunction scriptFieldsFunction() {
return scriptFieldsFunction;
}
public Map<String, Object> params() {
return params;
public SearchScript script() {
return this.script;
}
}

View File

@ -20,7 +20,7 @@
package org.elasticsearch.search.fetch.script;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.SearchParseElement;
import org.elasticsearch.search.internal.SearchContext;
@ -66,7 +66,8 @@ public class ScriptFieldsParseElement implements SearchParseElement {
}
}
}
context.scriptFields().add(new ScriptFieldsContext.ScriptField(fieldName, new ScriptFieldsFunction(scriptLang, script, context.scriptService(), context.mapperService(), context.fieldDataCache()), params));
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService());
context.scriptFields().add(new ScriptFieldsContext.ScriptField(fieldName, searchScript));
}
}
}

View File

@ -38,6 +38,7 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.search.ScriptSearchLookup;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.DfsSearchResult;
@ -111,6 +112,8 @@ public class SearchContext implements Releasable {
private ScriptFieldsContext scriptFields;
private ScriptSearchLookup scriptSearchLookup;
private boolean queryRewritten;
private volatile TimeValue keepAlive;
@ -391,6 +394,13 @@ public class SearchContext implements Releasable {
return this.keepAliveTimeout;
}
public ScriptSearchLookup scriptSearchLookup() {
if (scriptSearchLookup == null) {
scriptSearchLookup = new ScriptSearchLookup(mapperService(), fieldDataCache());
}
return scriptSearchLookup;
}
public DfsSearchResult dfsResult() {
return dfsResult;
}

View File

@ -22,10 +22,9 @@ package org.elasticsearch.search.sort;
import org.apache.lucene.search.FieldComparatorSource;
import org.apache.lucene.search.SortField;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.field.function.FieldsFunction;
import org.elasticsearch.index.field.function.script.ScriptFieldsFunction;
import org.elasticsearch.index.field.function.sort.DoubleFieldsFunctionDataComparator;
import org.elasticsearch.index.field.function.sort.StringFieldsFunctionDataComparator;
import org.elasticsearch.script.search.SearchScript;
import org.elasticsearch.search.SearchParseException;
import org.elasticsearch.search.internal.SearchContext;
@ -75,12 +74,12 @@ public class ScriptSortParser implements SortParser {
if (type == null) {
throw new SearchParseException(context, "_script sorting requires setting the type of the script");
}
FieldsFunction fieldsFunction = new ScriptFieldsFunction(scriptLang, script, context.scriptService(), context.mapperService(), context.fieldDataCache());
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService());
FieldComparatorSource fieldComparatorSource;
if ("string".equals(type)) {
fieldComparatorSource = StringFieldsFunctionDataComparator.comparatorSource(fieldsFunction, params);
fieldComparatorSource = StringFieldsFunctionDataComparator.comparatorSource(searchScript);
} else if ("number".equals(type)) {
fieldComparatorSource = DoubleFieldsFunctionDataComparator.comparatorSource(fieldsFunction, params);
fieldComparatorSource = DoubleFieldsFunctionDataComparator.comparatorSource(searchScript);
} else {
throw new SearchParseException(context, "custom script sort type [" + type + "] not supported");
}