externalize script search lookup into a generalized lookup that can be used outside of scripts

This commit is contained in:
kimchy 2010-12-12 01:01:56 +02:00
parent a914865c45
commit be4b2e2de6
21 changed files with 661 additions and 528 deletions

View File

@@ -96,7 +96,7 @@ public class CustomScoreQueryParser extends AbstractIndexComponent implements XC
if (context == null) { if (context == null) {
throw new ElasticSearchIllegalStateException("No search context on going..."); throw new ElasticSearchIllegalStateException("No search context on going...");
} }
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), scriptLang, script, vars, parseContext.scriptService()); SearchScript searchScript = new SearchScript(context.lookup(), scriptLang, script, vars, parseContext.scriptService());
FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(query, new ScriptScoreFunction(searchScript)); FunctionScoreQuery functionScoreQuery = new FunctionScoreQuery(query, new ScriptScoreFunction(searchScript));
functionScoreQuery.setBoost(boost); functionScoreQuery.setBoost(boost);
return functionScoreQuery; return functionScoreQuery;

View File

@@ -121,7 +121,7 @@ public class ScriptFilterParser extends AbstractIndexComponent implements XConte
throw new ElasticSearchIllegalStateException("No search context on going..."); throw new ElasticSearchIllegalStateException("No search context on going...");
} }
this.searchScript = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, scriptService); this.searchScript = new SearchScript(context.lookup(), scriptLang, script, params, scriptService);
} }
@Override public String toString() { @Override public String toString() {

View File

@@ -1,500 +0,0 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.script.search;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.compress.lzf.LZFDecoder;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.lucene.document.SingleFieldSelector;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldSelector;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.*;
/**
* @author kimchy (shay.banon)
*/
// Per-search-request lookup holder: exposes the "doc", "_source" and "_fields"
// views to scripts and keeps them positioned on the current reader/document.
public class ScriptSearchLookup {
// field-data backed per-document values, bound to the "doc" script variable
final DocMap docMap;
// lazily parsed _source, bound to the "_source" script variable
final SourceMap sourceMap;
// stored-field access, bound to the "_fields" script variable
final FieldsMap fieldsMap;
// shared immutable variable map, returned when the caller passes no params
final Map<String, Object> scriptVars;
public ScriptSearchLookup(MapperService mapperService, FieldDataCache fieldDataCache) {
docMap = new DocMap(mapperService, fieldDataCache);
sourceMap = new SourceMap();
fieldsMap = new FieldsMap(mapperService);
scriptVars = ImmutableMap.<String, Object>of("doc", docMap, "_source", sourceMap, "_fields", fieldsMap);
}
// Returns the script variable map. With null params the shared immutable map is
// returned; otherwise the three lookup objects are added into the caller's
// (mutable) params map, which is then returned. Note: this mutates `params`.
public Map<String, Object> processScriptParams(@Nullable Map<String, Object> params) {
if (params == null) {
return scriptVars;
}
params.put("doc", docMap);
params.put("_source", sourceMap);
params.put("_fields", fieldsMap);
return params;
}
// Propagates a segment/reader change to all lookups so per-reader state resets.
public void setNextReader(IndexReader reader) {
docMap.setNextReader(reader);
sourceMap.setNextReader(reader);
fieldsMap.setNextReader(reader);
}
// Positions all lookups on the given document id within the current reader.
public void setNextDocId(int docId) {
docMap.setNextDocId(docId);
sourceMap.setNextDocId(docId);
fieldsMap.setNextDocId(docId);
}
// Read-only Map view over the current document's parsed _source.
// The source is loaded and parsed lazily on first read access and cached
// until the reader or docId changes. Mutating Map operations are unsupported.
static class SourceMap implements Map {
private IndexReader reader;
private int docId = -1;
// cached parsed source for the current (reader, docId); null means not loaded yet
private Map<String, Object> source;
public Map<String, Object> parsedSource() {
return source;
}
// Allows callers that already parsed the source to inject it and skip a reload.
public void parsedSource(Map<String, Object> source) {
this.source = source;
}
// Loads and parses the stored _source field on demand, handling both LZF
// compressed and plain payloads. Parse/IO failures surface as
// ElasticSearchParseException; the parser is always closed.
private Map<String, Object> loadSourceIfNeeded() {
if (source != null) {
return source;
}
XContentParser parser = null;
try {
Document doc = reader.document(docId, SourceFieldSelector.INSTANCE);
Fieldable sourceField = doc.getFieldable(SourceFieldMapper.NAME);
byte[] source = sourceField.getBinaryValue();
if (LZFDecoder.isCompressed(source)) {
BytesStreamInput siBytes = new BytesStreamInput(source);
LZFStreamInput siLzf = CachedStreamInput.cachedLzf(siBytes);
// sniff the content type from the stream head, then rewind before parsing
XContentType contentType = XContentFactory.xContentType(siLzf);
siLzf.resetToBufferStart();
parser = XContentFactory.xContent(contentType).createParser(siLzf);
this.source = parser.map();
} else {
parser = XContentFactory.xContent(source).createParser(source);
this.source = parser.map();
}
} catch (Exception e) {
throw new ElasticSearchParseException("failed to parse / load source", e);
} finally {
if (parser != null) {
parser.close();
}
}
return this.source;
}
public void setNextReader(IndexReader reader) {
if (this.reader == reader) { // if we are called with the same reader, don't invalidate source
return;
}
this.reader = reader;
this.source = null;
this.docId = -1;
}
public void setNextDocId(int docId) {
if (this.docId == docId) { // if we are called with the same docId, don't invalidate source
return;
}
this.docId = docId;
this.source = null;
}
// --- read-only Map delegation; each call triggers a lazy load if needed
@Override public Object get(Object key) {
return loadSourceIfNeeded().get(key);
}
@Override public int size() {
return loadSourceIfNeeded().size();
}
@Override public boolean isEmpty() {
return loadSourceIfNeeded().isEmpty();
}
@Override public boolean containsKey(Object key) {
return loadSourceIfNeeded().containsKey(key);
}
@Override public boolean containsValue(Object value) {
return loadSourceIfNeeded().containsValue(value);
}
@Override public Set keySet() {
return loadSourceIfNeeded().keySet();
}
@Override public Collection values() {
return loadSourceIfNeeded().values();
}
@Override public Set entrySet() {
return loadSourceIfNeeded().entrySet();
}
// --- mutation is not supported on the source view
@Override public Object put(Object key, Object value) {
throw new UnsupportedOperationException();
}
@Override public Object remove(Object key) {
throw new UnsupportedOperationException();
}
@Override public void putAll(Map m) {
throw new UnsupportedOperationException();
}
@Override public void clear() {
throw new UnsupportedOperationException();
}
}
// Map view over the current document's stored fields, keyed by field name.
// Field values are loaded per document via a single-field selector and cached
// per field name; the cache is cleared on reader/doc changes. Only get() and
// containsKey() are supported; everything else throws.
public static class FieldsMap implements Map {
private final MapperService mapperService;
private IndexReader reader;
private int docId = -1;
// per-field-name cache; FieldData entries survive doc changes but are cleared
private final Map<String, FieldData> cachedFieldData = Maps.newHashMap();
private final SingleFieldSelector fieldSelector = new SingleFieldSelector();
FieldsMap(MapperService mapperService) {
this.mapperService = mapperService;
}
public void setNextReader(IndexReader reader) {
if (this.reader == reader) { // if we are called with the same reader, don't invalidate source
return;
}
this.reader = reader;
clearCache();
this.docId = -1;
}
public void setNextDocId(int docId) {
if (this.docId == docId) { // if we are called with the same docId, don't invalidate source
return;
}
this.docId = docId;
clearCache();
}
@Override public Object get(Object key) {
return loadFieldData(key.toString());
}
// containsKey is implemented as "does loading succeed" — an unmapped field
// (or a load failure) reports as absent rather than throwing.
@Override public boolean containsKey(Object key) {
try {
loadFieldData(key.toString());
return true;
} catch (Exception e) {
return false;
}
}
@Override public int size() {
throw new UnsupportedOperationException();
}
@Override public boolean isEmpty() {
throw new UnsupportedOperationException();
}
@Override public Set keySet() {
throw new UnsupportedOperationException();
}
@Override public Collection values() {
throw new UnsupportedOperationException();
}
@Override public Set entrySet() {
throw new UnsupportedOperationException();
}
@Override public Object put(Object key, Object value) {
throw new UnsupportedOperationException();
}
@Override public Object remove(Object key) {
throw new UnsupportedOperationException();
}
@Override public void clear() {
throw new UnsupportedOperationException();
}
@Override public void putAll(Map m) {
throw new UnsupportedOperationException();
}
@Override public boolean containsValue(Object value) {
throw new UnsupportedOperationException();
}
// Resolves (and caches) the FieldData for the given name, loading the stored
// document restricted to that single field when not already loaded for the
// current doc. Throws if the field is unmapped or the load fails.
private FieldData loadFieldData(String name) {
FieldData data = cachedFieldData.get(name);
if (data == null) {
FieldMapper mapper = mapperService.smartNameFieldMapper(name);
if (mapper == null) {
throw new ElasticSearchIllegalArgumentException("No field found for [" + name + "]");
}
data = new FieldData(mapper);
cachedFieldData.put(name, data);
}
if (data.doc() == null) {
fieldSelector.name(data.mapper().names().indexName());
try {
data.doc(reader.document(docId, fieldSelector));
} catch (IOException e) {
throw new ElasticSearchParseException("failed to load field [" + name + "]", e);
}
}
return data;
}
private void clearCache() {
for (Entry<String, FieldData> entry : cachedFieldData.entrySet()) {
entry.getValue().clear();
}
}
// Lazily extracted single/multi values of one stored field for the current doc.
public static class FieldData {
// we can cache the mapper completely per name, since it is on an index/shard level (the lookup does not change within the scope of a search request)
private final FieldMapper mapper;
private Document doc;
private Object value;
private boolean valueLoaded = false;
private List<Object> values = new ArrayList<Object>();
private boolean valuesLoaded = false;
FieldData(FieldMapper mapper) {
this.mapper = mapper;
}
public FieldMapper mapper() {
return mapper;
}
public Document doc() {
return doc;
}
public void doc(Document doc) {
this.doc = doc;
}
// Resets all per-document state so the next access reloads from the new doc.
public void clear() {
value = null;
valueLoaded = false;
values.clear();
// BUGFIX: was `valuesLoaded = true`, which made getValues() return the
// just-cleared (empty) list forever after a reader/doc change instead of
// re-extracting from the new document. Mirror valueLoaded above.
valuesLoaded = false;
doc = null;
}
public boolean isEmpty() {
if (valueLoaded) {
return value == null;
}
if (valuesLoaded) {
return values.isEmpty();
}
return getValue() == null;
}
// Returns the first value of the field (mapper-converted), or null if absent.
public Object getValue() {
if (valueLoaded) {
return value;
}
valueLoaded = true;
value = null;
Fieldable field = doc.getFieldable(mapper.names().indexName());
if (field == null) {
return null;
}
value = mapper.value(field);
return value;
}
// Returns all values of the field (mapper-converted); empty list if absent.
public List<Object> getValues() {
if (valuesLoaded) {
return values;
}
valuesLoaded = true;
values.clear();
Fieldable[] fields = doc.getFieldables(mapper.names().indexName());
for (Fieldable field : fields) {
values.add(mapper.value(field));
}
return values;
}
}
}
// --- Map implementation for doc field data lookup
// Backed by the (index-level) field data cache; resolves field names through
// the mapper service and positions the returned field data on the current doc.
// Only get() and containsKey() are supported; all other Map operations throw.
public static class DocMap implements Map {
// small per-segment cache of resolved FieldData, cleared on reader change
private final Map<String, FieldData> localCacheFieldData = Maps.newHashMapWithExpectedSize(4);
private final MapperService mapperService;
private final FieldDataCache fieldDataCache;
private IndexReader reader;
private int docId = -1;
DocMap(MapperService mapperService, FieldDataCache fieldDataCache) {
this.mapperService = mapperService;
this.fieldDataCache = fieldDataCache;
}
public void setNextReader(IndexReader reader) {
if (this.reader == reader) { // if we are called with the same reader, don't invalidate source
return;
}
this.reader = reader;
this.docId = -1;
localCacheFieldData.clear();
}
public void setNextDocId(int docId) {
this.docId = docId;
}
// Resolves the field's FieldData (caching it per reader) and returns the
// per-document view for the current docId. Throws for unmapped fields.
@Override public Object get(Object key) {
// assume its a string...
String fieldName = key.toString();
FieldData fieldData = localCacheFieldData.get(fieldName);
if (fieldData == null) {
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
if (mapper == null) {
throw new ElasticSearchIllegalArgumentException("No field found for [" + fieldName + "]");
}
try {
fieldData = fieldDataCache.cache(mapper.fieldDataType(), reader, mapper.names().indexName());
} catch (IOException e) {
throw new ElasticSearchException("Failed to load field data for [" + fieldName + "]", e);
}
localCacheFieldData.put(fieldName, fieldData);
}
return fieldData.docFieldData(docId);
}
// True when the name is already cached or maps to a known field; does not
// load field data and does not cache the mapper lookup result.
public boolean containsKey(Object key) {
// assume its a string...
String fieldName = key.toString();
FieldData fieldData = localCacheFieldData.get(fieldName);
if (fieldData == null) {
FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
if (mapper == null) {
return false;
}
}
return true;
}
public int size() {
throw new UnsupportedOperationException();
}
public boolean isEmpty() {
throw new UnsupportedOperationException();
}
public boolean containsValue(Object value) {
throw new UnsupportedOperationException();
}
public Object put(Object key, Object value) {
throw new UnsupportedOperationException();
}
public Object remove(Object key) {
throw new UnsupportedOperationException();
}
public void putAll(Map m) {
throw new UnsupportedOperationException();
}
public void clear() {
throw new UnsupportedOperationException();
}
public Set keySet() {
throw new UnsupportedOperationException();
}
public Collection values() {
throw new UnsupportedOperationException();
}
public Set entrySet() {
throw new UnsupportedOperationException();
}
}
}

View File

@ -24,6 +24,7 @@ import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.search.lookup.SearchLookup;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import java.util.Map; import java.util.Map;
@ -33,23 +34,23 @@ import java.util.Map;
*/ */
public class SearchScript { public class SearchScript {
private final ScriptSearchLookup searchLookup; private final SearchLookup searchLookup;
private final ExecutableScript script; private final ExecutableScript script;
public SearchScript(ScriptSearchLookup searchLookup, ExecutableScript script) { public SearchScript(SearchLookup searchLookup, ExecutableScript script) {
this.searchLookup = searchLookup; this.searchLookup = searchLookup;
this.script = script; this.script = script;
} }
public SearchScript(ScriptSearchLookup searchLookup, String lang, String script, @Nullable Map<String, Object> params, ScriptService scriptService) { public SearchScript(SearchLookup searchLookup, String lang, String script, @Nullable Map<String, Object> params, ScriptService scriptService) {
this.searchLookup = searchLookup; this.searchLookup = searchLookup;
this.script = scriptService.executable(lang, script, searchLookup.processScriptParams(params)); this.script = scriptService.executable(lang, script, searchLookup.processAsMap(params));
} }
public SearchScript(String lang, String script, @Nullable Map<String, Object> params, ScriptService scriptService, MapperService mapperService, FieldDataCache fieldDataCache) { public SearchScript(String lang, String script, @Nullable Map<String, Object> params, ScriptService scriptService, MapperService mapperService, FieldDataCache fieldDataCache) {
this.searchLookup = new ScriptSearchLookup(mapperService, fieldDataCache); this.searchLookup = new SearchLookup(mapperService, fieldDataCache);
this.script = scriptService.executable(lang, script, searchLookup.processScriptParams(params)); this.script = scriptService.executable(lang, script, searchLookup.processAsMap(params));
} }
public void setNextReader(IndexReader reader) { public void setNextReader(IndexReader reader) {

View File

@ -41,7 +41,7 @@ public class ScriptGeoDistanceFacetCollector extends GeoDistanceFacetCollector {
String scriptLang, String script, Map<String, Object> params) { String scriptLang, String script, Map<String, Object> params) {
super(facetName, fieldName, lat, lon, unit, geoDistance, entries, context); super(facetName, fieldName, lat, lon, unit, geoDistance, entries, context);
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService()); this.script = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
} }
@Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException { @Override protected void doSetNextReader(IndexReader reader, int docBase) throws IOException {

View File

@ -79,7 +79,7 @@ public class KeyValueScriptHistogramFacetCollector extends AbstractFacetCollecto
setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter())); setFilter(context.filterCache().cache(smartMappers.docMapper().typeFilter()));
} }
this.valueScript = new SearchScript(context.scriptSearchLookup(), scriptLang, valueScript, params, context.scriptService()); this.valueScript = new SearchScript(context.lookup(), scriptLang, valueScript, params, context.scriptService());
FieldMapper mapper = smartMappers.mapper(); FieldMapper mapper = smartMappers.mapper();

View File

@ -49,8 +49,8 @@ public class ScriptHistogramFacetCollector extends AbstractFacetCollector {
public ScriptHistogramFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) { public ScriptHistogramFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, long interval, HistogramFacet.ComparatorType comparatorType, SearchContext context) {
super(facetName); super(facetName);
this.keyScript = new SearchScript(context.scriptSearchLookup(), scriptLang, keyScript, params, context.scriptService()); this.keyScript = new SearchScript(context.lookup(), scriptLang, keyScript, params, context.scriptService());
this.valueScript = new SearchScript(context.scriptSearchLookup(), scriptLang, valueScript, params, context.scriptService()); this.valueScript = new SearchScript(context.lookup(), scriptLang, valueScript, params, context.scriptService());
this.interval = interval > 0 ? interval : 0; this.interval = interval > 0 ? interval : 0;
this.comparatorType = comparatorType; this.comparatorType = comparatorType;
} }

View File

@ -41,8 +41,8 @@ public class ScriptRangeFacetCollector extends AbstractFacetCollector {
public ScriptRangeFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, RangeFacet.Entry[] entries, SearchContext context) { public ScriptRangeFacetCollector(String facetName, String scriptLang, String keyScript, String valueScript, Map<String, Object> params, RangeFacet.Entry[] entries, SearchContext context) {
super(facetName); super(facetName);
this.keyScript = new SearchScript(context.scriptSearchLookup(), scriptLang, keyScript, params, context.scriptService()); this.keyScript = new SearchScript(context.lookup(), scriptLang, keyScript, params, context.scriptService());
this.valueScript = new SearchScript(context.scriptSearchLookup(), scriptLang, valueScript, params, context.scriptService()); this.valueScript = new SearchScript(context.lookup(), scriptLang, valueScript, params, context.scriptService());
this.entries = entries; this.entries = entries;
} }

View File

@ -47,7 +47,7 @@ public class ScriptStatisticalFacetCollector extends AbstractFacetCollector {
public ScriptStatisticalFacetCollector(String facetName, String scriptLang, String script, Map<String, Object> params, SearchContext context) { public ScriptStatisticalFacetCollector(String facetName, String scriptLang, String script, Map<String, Object> params, SearchContext context) {
super(facetName); super(facetName);
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService()); this.script = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
} }
@Override protected void doCollect(int doc) throws IOException { @Override protected void doCollect(int doc) throws IOException {

View File

@ -100,7 +100,7 @@ public class TermsFacetCollector extends AbstractFacetCollector {
} }
if (script != null) { if (script != null) {
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService()); this.script = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
} else { } else {
this.script = null; this.script = null;
} }

View File

@ -94,7 +94,7 @@ public class TermsFieldsFacetCollector extends AbstractFacetCollector {
} }
if (script != null) { if (script != null) {
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService()); this.script = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
} else { } else {
this.script = null; this.script = null;
} }

View File

@ -65,7 +65,7 @@ public class TermsScriptFieldFacetCollector extends AbstractFacetCollector {
this.comparatorType = comparatorType; this.comparatorType = comparatorType;
this.numberOfShards = context.numberOfShards(); this.numberOfShards = context.numberOfShards();
this.sScript = script; this.sScript = script;
this.script = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService()); this.script = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
this.excluded = excluded; this.excluded = excluded;
this.matcher = pattern != null ? pattern.matcher("") : null; this.matcher = pattern != null ? pattern.matcher("") : null;

View File

@ -39,7 +39,7 @@ public class FieldsParseElement implements SearchParseElement {
String name = parser.text(); String name = parser.text();
if (name.contains("_source.") || name.contains("doc[")) { if (name.contains("_source.") || name.contains("doc[")) {
// script field to load from source // script field to load from source
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), null, name, null, context.scriptService()); SearchScript searchScript = new SearchScript(context.lookup(), null, name, null, context.scriptService());
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript)); context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript));
} else { } else {
context.fieldNames().add(name); context.fieldNames().add(name);
@ -52,7 +52,7 @@ public class FieldsParseElement implements SearchParseElement {
String name = parser.text(); String name = parser.text();
if (name.contains("_source.") || name.contains("doc[")) { if (name.contains("_source.") || name.contains("doc[")) {
// script field to load from source // script field to load from source
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), null, name, null, context.scriptService()); SearchScript searchScript = new SearchScript(context.lookup(), null, name, null, context.scriptService());
context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript)); context.scriptFields().add(new ScriptFieldsContext.ScriptField(name, searchScript));
} else { } else {
context.fieldNames().add(name); context.fieldNames().add(name);

View File

@ -66,7 +66,7 @@ public class ScriptFieldsParseElement implements SearchParseElement {
} }
} }
} }
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService()); SearchScript searchScript = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
context.scriptFields().add(new ScriptFieldsContext.ScriptField(fieldName, searchScript)); context.scriptFields().add(new ScriptFieldsContext.ScriptField(fieldName, searchScript));
} }
} }

View File

@ -38,7 +38,6 @@ import org.elasticsearch.index.query.ParsedQuery;
import org.elasticsearch.index.service.IndexService; import org.elasticsearch.index.service.IndexService;
import org.elasticsearch.index.similarity.SimilarityService; import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.search.ScriptSearchLookup;
import org.elasticsearch.search.Scroll; import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchShardTarget;
import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.dfs.DfsSearchResult;
@ -46,6 +45,7 @@ import org.elasticsearch.search.facet.SearchContextFacets;
import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult;
import org.elasticsearch.search.fetch.script.ScriptFieldsContext; import org.elasticsearch.search.fetch.script.ScriptFieldsContext;
import org.elasticsearch.search.highlight.SearchContextHighlight; import org.elasticsearch.search.highlight.SearchContextHighlight;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.QuerySearchResult;
import java.util.List; import java.util.List;
@ -126,7 +126,7 @@ public class SearchContext implements Releasable {
private ScriptFieldsContext scriptFields; private ScriptFieldsContext scriptFields;
private ScriptSearchLookup scriptSearchLookup; private SearchLookup searchLookup;
private boolean queryRewritten; private boolean queryRewritten;
@ -405,11 +405,11 @@ public class SearchContext implements Releasable {
this.keepAlive = keepAlive; this.keepAlive = keepAlive;
} }
public ScriptSearchLookup scriptSearchLookup() { public SearchLookup lookup() {
if (scriptSearchLookup == null) { if (searchLookup == null) {
scriptSearchLookup = new ScriptSearchLookup(mapperService(), fieldDataCache()); searchLookup = new SearchLookup(mapperService(), fieldDataCache());
} }
return scriptSearchLookup; return searchLookup;
} }
public DfsSearchResult dfsResult() { public DfsSearchResult dfsResult() {

View File

@@ -0,0 +1,140 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchException;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.field.data.FieldData;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
/**
* @author kimchy (shay.banon)
*/
/**
 * Script-facing {@code doc} lookup: a read-only {@link Map} view whose keys are
 * field names and whose values are per-document field data for the current doc.
 * Resolved field data is cached per reader segment. Only {@code get} and
 * {@code containsKey} are supported; every other {@link Map} operation throws
 * {@link UnsupportedOperationException}.
 */
public class DocLookup implements Map {

    private final Map<String, FieldData> localCacheFieldData = Maps.newHashMapWithExpectedSize(4);

    private final MapperService mapperService;

    private final FieldDataCache fieldDataCache;

    private IndexReader reader;

    private int docId = -1;

    DocLookup(MapperService mapperService, FieldDataCache fieldDataCache) {
        this.mapperService = mapperService;
        this.fieldDataCache = fieldDataCache;
    }

    public void setNextReader(IndexReader reader) {
        if (this.reader != reader) {
            // new segment: reset position and drop per-reader field data cache
            this.reader = reader;
            this.docId = -1;
            localCacheFieldData.clear();
        }
    }

    public void setNextDocId(int docId) {
        this.docId = docId;
    }

    @Override public Object get(Object key) {
        // keys are interpreted as field names
        String fieldName = key.toString();
        FieldData cached = localCacheFieldData.get(fieldName);
        if (cached != null) {
            return cached.docFieldData(docId);
        }
        FieldMapper mapper = mapperService.smartNameFieldMapper(fieldName);
        if (mapper == null) {
            throw new ElasticSearchIllegalArgumentException("No field found for [" + fieldName + "]");
        }
        FieldData loaded;
        try {
            loaded = fieldDataCache.cache(mapper.fieldDataType(), reader, mapper.names().indexName());
        } catch (IOException e) {
            throw new ElasticSearchException("Failed to load field data for [" + fieldName + "]", e);
        }
        localCacheFieldData.put(fieldName, loaded);
        return loaded.docFieldData(docId);
    }

    public boolean containsKey(Object key) {
        // keys are interpreted as field names; does not load or cache field data
        String fieldName = key.toString();
        if (localCacheFieldData.get(fieldName) != null) {
            return true;
        }
        return mapperService.smartNameFieldMapper(fieldName) != null;
    }

    public int size() {
        throw new UnsupportedOperationException();
    }

    public boolean isEmpty() {
        throw new UnsupportedOperationException();
    }

    public boolean containsValue(Object value) {
        throw new UnsupportedOperationException();
    }

    public Object put(Object key, Object value) {
        throw new UnsupportedOperationException();
    }

    public Object remove(Object key) {
        throw new UnsupportedOperationException();
    }

    public void putAll(Map m) {
        throw new UnsupportedOperationException();
    }

    public void clear() {
        throw new UnsupportedOperationException();
    }

    public Set keySet() {
        throw new UnsupportedOperationException();
    }

    public Collection values() {
        throw new UnsupportedOperationException();
    }

    public Set entrySet() {
        throw new UnsupportedOperationException();
    }
}

View File

@@ -0,0 +1,107 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.elasticsearch.index.mapper.FieldMapper;
import java.util.ArrayList;
import java.util.List;
/**
* @author kimchy (shay.banon)
*/
/**
 * Lazily extracted stored-field values for a single field of the current
 * document. Holds the (request-stable) mapper, the loaded stored document,
 * and caches the single value / value list until {@link #clear()} is called.
 */
public class FieldLookup {

    // we can cache the mapper completely per name, since it is on an index/shard level (the lookup does not change within the scope of a search request)
    private final FieldMapper mapper;

    private Document doc;

    private Object value;

    private boolean valueLoaded = false;

    private List<Object> values = new ArrayList<Object>();

    private boolean valuesLoaded = false;

    FieldLookup(FieldMapper mapper) {
        this.mapper = mapper;
    }

    public FieldMapper mapper() {
        return mapper;
    }

    public Document doc() {
        return doc;
    }

    public void doc(Document doc) {
        this.doc = doc;
    }

    /**
     * Resets all per-document state so the next access reloads from the new doc.
     */
    public void clear() {
        value = null;
        valueLoaded = false;
        values.clear();
        // BUGFIX: was `valuesLoaded = true`, which made getValues() keep
        // returning the just-cleared (empty) list after a reader/doc change
        // instead of re-extracting values. Mirror valueLoaded above.
        valuesLoaded = false;
        doc = null;
    }

    public boolean isEmpty() {
        if (valueLoaded) {
            return value == null;
        }
        if (valuesLoaded) {
            return values.isEmpty();
        }
        return getValue() == null;
    }

    /**
     * Returns the first value of the field (converted through the mapper),
     * or {@code null} if the field is absent from the stored document.
     */
    public Object getValue() {
        if (valueLoaded) {
            return value;
        }
        valueLoaded = true;
        value = null;
        Fieldable field = doc.getFieldable(mapper.names().indexName());
        if (field == null) {
            return null;
        }
        value = mapper.value(field);
        return value;
    }

    /**
     * Returns all values of the field (converted through the mapper);
     * an empty list if the field is absent from the stored document.
     */
    public List<Object> getValues() {
        if (valuesLoaded) {
            return values;
        }
        valuesLoaded = true;
        values.clear();
        Fieldable[] fields = doc.getFieldables(mapper.names().indexName());
        for (Fieldable field : fields) {
            values.add(mapper.value(field));
        }
        return values;
    }
}

View File

@ -0,0 +1,152 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.collect.Maps;
import org.elasticsearch.common.lucene.document.SingleFieldSelector;
import org.elasticsearch.index.mapper.FieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
/**
 * A {@link Map} facade over the stored fields of the current document, keyed
 * by field name. Field data is resolved lazily through the
 * {@link MapperService} and cached per field name; per-document state is
 * invalidated whenever the reader or the doc id changes. Only lookup-style
 * access ({@link #get(Object)} / {@link #containsKey(Object)}) is supported.
 *
 * @author kimchy (shay.banon)
 */
public class FieldsLookup implements Map {

    private final MapperService mapperService;

    // per field name cache; mappers are stable for the request, doc state is cleared per doc
    private final Map<String, FieldLookup> cachedFieldData = Maps.newHashMap();

    private final SingleFieldSelector fieldSelector = new SingleFieldSelector();

    private IndexReader reader;

    private int docId = -1;

    FieldsLookup(MapperService mapperService) {
        this.mapperService = mapperService;
    }

    public void setNextReader(IndexReader reader) {
        if (this.reader == reader) { // same reader, keep the cached per-document state
            return;
        }
        this.reader = reader;
        clearCache();
        this.docId = -1;
    }

    public void setNextDocId(int docId) {
        if (this.docId == docId) { // same doc, keep the cached per-document state
            return;
        }
        this.docId = docId;
        clearCache();
    }

    @Override public Object get(Object key) {
        return loadFieldData(key.toString());
    }

    @Override public boolean containsKey(Object key) {
        // a field "exists" here if its data can be resolved and loaded without failure
        try {
            loadFieldData(key.toString());
            return true;
        } catch (Exception e) {
            return false;
        }
    }

    // everything below is unsupported: this map cannot be enumerated or mutated

    @Override public int size() {
        throw new UnsupportedOperationException();
    }

    @Override public boolean isEmpty() {
        throw new UnsupportedOperationException();
    }

    @Override public boolean containsValue(Object value) {
        throw new UnsupportedOperationException();
    }

    @Override public Set keySet() {
        throw new UnsupportedOperationException();
    }

    @Override public Collection values() {
        throw new UnsupportedOperationException();
    }

    @Override public Set entrySet() {
        throw new UnsupportedOperationException();
    }

    @Override public Object put(Object key, Object value) {
        throw new UnsupportedOperationException();
    }

    @Override public Object remove(Object key) {
        throw new UnsupportedOperationException();
    }

    @Override public void putAll(Map m) {
        throw new UnsupportedOperationException();
    }

    @Override public void clear() {
        throw new UnsupportedOperationException();
    }

    /**
     * Resolves (and caches) the {@link FieldLookup} for the given field name,
     * loading the stored field of the current document on first access.
     */
    private FieldLookup loadFieldData(String name) {
        FieldLookup lookup = cachedFieldData.get(name);
        if (lookup == null) {
            FieldMapper mapper = mapperService.smartNameFieldMapper(name);
            if (mapper == null) {
                throw new ElasticSearchIllegalArgumentException("No field found for [" + name + "]");
            }
            lookup = new FieldLookup(mapper);
            cachedFieldData.put(name, lookup);
        }
        if (lookup.doc() == null) {
            fieldSelector.name(lookup.mapper().names().indexName());
            try {
                lookup.doc(reader.document(docId, fieldSelector));
            } catch (IOException e) {
                throw new ElasticSearchParseException("failed to load field [" + name + "]", e);
            }
        }
        return lookup;
    }

    // reset the per-document state of every cached field lookup
    private void clearCache() {
        for (FieldLookup lookup : cachedFieldData.values()) {
            lookup.clear();
        }
    }
}

View File

@ -0,0 +1,84 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.common.collect.ImmutableMap;
import org.elasticsearch.index.cache.field.data.FieldDataCache;
import org.elasticsearch.index.mapper.MapperService;
import javax.annotation.Nullable;
import java.util.Map;
/**
 * Entry point for data lookups during search execution: bundles the
 * <code>doc</code>, <code>_source</code> and <code>_fields</code> views and
 * keeps them positioned on the current reader / document.
 *
 * @author kimchy (shay.banon)
 */
public class SearchLookup {

    final DocLookup docMap;

    final SourceLookup sourceLookup;

    final FieldsLookup fieldsLookup;

    // shared read-only view exposing all lookups under their script-visible names
    final Map<String, Object> asMap;

    public SearchLookup(MapperService mapperService, FieldDataCache fieldDataCache) {
        this.docMap = new DocLookup(mapperService, fieldDataCache);
        this.sourceLookup = new SourceLookup();
        this.fieldsLookup = new FieldsLookup(mapperService);
        this.asMap = ImmutableMap.<String, Object>of("doc", docMap, "_doc", docMap, "_source", sourceLookup, "_fields", fieldsLookup);
    }

    /**
     * Returns a map exposing the lookup views. When <code>params</code> is
     * <code>null</code>, the shared immutable map is returned; otherwise the
     * views are added to the given (mutable) map, which is then returned.
     */
    public Map<String, Object> processAsMap(@Nullable Map<String, Object> params) {
        if (params == null) {
            return asMap;
        }
        params.putAll(asMap);
        return params;
    }

    public SourceLookup source() {
        return this.sourceLookup;
    }

    public FieldsLookup fields() {
        return this.fieldsLookup;
    }

    public DocLookup doc() {
        return this.docMap;
    }

    // reposition all views on a new reader
    public void setNextReader(IndexReader reader) {
        docMap.setNextReader(reader);
        sourceLookup.setNextReader(reader);
        fieldsLookup.setNextReader(reader);
    }

    // reposition all views on a new document
    public void setNextDocId(int docId) {
        docMap.setNextDocId(docId);
        sourceLookup.setNextDocId(docId);
        fieldsLookup.setNextDocId(docId);
    }
}

View File

@ -0,0 +1,149 @@
/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.lookup;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Fieldable;
import org.apache.lucene.index.IndexReader;
import org.elasticsearch.ElasticSearchParseException;
import org.elasticsearch.common.compress.lzf.LZFDecoder;
import org.elasticsearch.common.io.stream.BytesStreamInput;
import org.elasticsearch.common.io.stream.CachedStreamInput;
import org.elasticsearch.common.io.stream.LZFStreamInput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldSelector;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
/**
 * A read-only {@link Map} view over the parsed <code>_source</code> of the
 * current document. The source is fetched from the reader and parsed lazily on
 * first access, transparently handling both LZF-compressed and plain content.
 *
 * @author kimchy (shay.banon)
 */
public class SourceLookup implements Map {

    private IndexReader reader;

    private int docId = -1;

    // parsed source of the current document; null until loaded
    private Map<String, Object> source;

    public Map<String, Object> source() {
        return source;
    }

    private Map<String, Object> loadSourceIfNeeded() {
        if (source != null) {
            return source;
        }
        XContentParser parser = null;
        try {
            Document doc = reader.document(docId, SourceFieldSelector.INSTANCE);
            Fieldable sourceField = doc.getFieldable(SourceFieldMapper.NAME);
            // local renamed (was "source") to avoid shadowing the field above
            byte[] data = sourceField.getBinaryValue();
            if (LZFDecoder.isCompressed(data)) {
                BytesStreamInput bytes = new BytesStreamInput(data);
                LZFStreamInput lzf = CachedStreamInput.cachedLzf(bytes);
                // sniff the content type first, then rewind before parsing
                XContentType contentType = XContentFactory.xContentType(lzf);
                lzf.resetToBufferStart();
                parser = XContentFactory.xContent(contentType).createParser(lzf);
            } else {
                parser = XContentFactory.xContent(data).createParser(data);
            }
            source = parser.map();
        } catch (Exception e) {
            throw new ElasticSearchParseException("failed to parse / load source", e);
        } finally {
            if (parser != null) {
                parser.close();
            }
        }
        return source;
    }

    public void setNextReader(IndexReader reader) {
        if (this.reader == reader) { // same reader, keep the cached source
            return;
        }
        this.reader = reader;
        this.source = null;
        this.docId = -1;
    }

    public void setNextDocId(int docId) {
        if (this.docId == docId) { // same doc, keep the cached source
            return;
        }
        this.docId = docId;
        this.source = null;
    }

    // read operations delegate to the lazily loaded source map

    @Override public Object get(Object key) {
        return loadSourceIfNeeded().get(key);
    }

    @Override public int size() {
        return loadSourceIfNeeded().size();
    }

    @Override public boolean isEmpty() {
        return loadSourceIfNeeded().isEmpty();
    }

    @Override public boolean containsKey(Object key) {
        return loadSourceIfNeeded().containsKey(key);
    }

    @Override public boolean containsValue(Object value) {
        return loadSourceIfNeeded().containsValue(value);
    }

    @Override public Set keySet() {
        return loadSourceIfNeeded().keySet();
    }

    @Override public Collection values() {
        return loadSourceIfNeeded().values();
    }

    @Override public Set entrySet() {
        return loadSourceIfNeeded().entrySet();
    }

    // this view is read-only: all mutation operations are unsupported

    @Override public Object put(Object key, Object value) {
        throw new UnsupportedOperationException();
    }

    @Override public Object remove(Object key) {
        throw new UnsupportedOperationException();
    }

    @Override public void putAll(Map m) {
        throw new UnsupportedOperationException();
    }

    @Override public void clear() {
        throw new UnsupportedOperationException();
    }
}

View File

@ -74,7 +74,7 @@ public class ScriptSortParser implements SortParser {
if (type == null) { if (type == null) {
throw new SearchParseException(context, "_script sorting requires setting the type of the script"); throw new SearchParseException(context, "_script sorting requires setting the type of the script");
} }
SearchScript searchScript = new SearchScript(context.scriptSearchLookup(), scriptLang, script, params, context.scriptService()); SearchScript searchScript = new SearchScript(context.lookup(), scriptLang, script, params, context.scriptService());
FieldComparatorSource fieldComparatorSource; FieldComparatorSource fieldComparatorSource;
if ("string".equals(type)) { if ("string".equals(type)) {
fieldComparatorSource = StringFieldsFunctionDataComparator.comparatorSource(searchScript); fieldComparatorSource = StringFieldsFunctionDataComparator.comparatorSource(searchScript);