Mirror of https://github.com/honeymoose/OpenSearch.git, synced 2025-02-17 10:25:15 +00:00
Scripting: Propagate Headers and Context through to ScriptService
At the moment, if an indexed script is used in a request, the spawned request to get the indexed script from the `.scripts` index does not get the headers and context copied to it from the original request. This change makes the calls to the `ScriptService` pass in a `HasContextAndHeaders` object that can provide the headers and context. For the `search()` method the context and headers are retrieved from `SearchContext.current()`.

Closes #12891
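As a hedged sketch of the resulting flow (names are taken from the signatures changed below; the wiring is compressed for illustration): a caller passes any `HasContextAndHeaders` implementation, typically its own request, into `ScriptService`, which copies the context and headers onto the spawned lookup request.

    // Sketch only, not part of the commit: the caller now supplies its request
    // (any HasContextAndHeaders) when asking for an executable script.
    ExecutableScript executable =
            scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request);

    // Inside ScriptService.getScriptFromIndex(...), that context, rather than
    // SearchContext.current(), is copied onto the GetRequest that fetches the
    // indexed script from the .scripts index.
    GetRequest getRequest = new GetRequest(SCRIPT_INDEX, scriptLang, id);
    getRequest.copyContextAndHeadersFrom(context);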
Parent: 467a459f95
Commit: 821021f0e4
@@ -55,7 +55,7 @@ public class TransportRenderSearchTemplateAction extends HandledTransportAction<
             @Override
             protected void doRun() throws Exception {
-                ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH);
+                ExecutableScript executable = scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, request);
                 BytesReference processedTemplate = (BytesReference) executable.run();
                 RenderSearchTemplateResponse response = new RenderSearchTemplateResponse();
                 response.source(processedTemplate);
@@ -146,7 +146,7 @@ public class TransportPercolateAction extends TransportBroadcastAction<Percolate
             PercolateResponse.Match[] matches = request.onlyCount() ? null : PercolateResponse.EMPTY;
             return new PercolateResponse(shardsResponses.length(), successfulShards, failedShards, shardFailures, tookInMillis, matches);
         } else {
-            PercolatorService.ReduceResult result = percolatorService.reduce(percolatorTypeId, shardResults);
+            PercolatorService.ReduceResult result = percolatorService.reduce(percolatorTypeId, shardResults, request);
             long tookInMillis = Math.max(1, System.currentTimeMillis() - request.startTime);
             return new PercolateResponse(
                     shardsResponses.length(), successfulShards, failedShards, shardFailures,
@@ -75,7 +75,8 @@ public class TransportSearchCountAction extends TransportSearchTypeAction {
             @Override
             protected void moveToSecondPhase() throws Exception {
                 // no need to sort, since we know we have no hits back
-                final InternalSearchResponse internalResponse = searchPhaseController.merge(SearchPhaseController.EMPTY_DOCS, firstResults, (AtomicArray<? extends FetchSearchResultProvider>) AtomicArray.empty());
+                final InternalSearchResponse internalResponse = searchPhaseController.merge(SearchPhaseController.EMPTY_DOCS, firstResults,
+                        (AtomicArray<? extends FetchSearchResultProvider>) AtomicArray.empty(), request);
                 String scrollId = null;
                 if (request.scroll() != null) {
                     scrollId = buildScrollId(request.searchType(), firstResults, null);
@@ -134,7 +134,8 @@ public class TransportSearchDfsQueryAndFetchAction extends TransportSearchTypeAc
                 @Override
                 public void doRun() throws IOException {
                     sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults);
-                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults, queryFetchResults);
+                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults,
+                            queryFetchResults, request);
                     String scrollId = null;
                     if (request.scroll() != null) {
                         scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.search.type;
 
+import com.carrotsearch.hppc.IntArrayList;
 
 import org.apache.lucene.search.ScoreDoc;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRunnable;
@@ -39,8 +40,8 @@ import org.elasticsearch.search.action.SearchServiceTransportAction;
 import org.elasticsearch.search.controller.SearchPhaseController;
 import org.elasticsearch.search.dfs.AggregatedDfs;
 import org.elasticsearch.search.dfs.DfsSearchResult;
-import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
 import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.search.fetch.ShardFetchSearchRequest;
 import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.ShardSearchTransportRequest;
 import org.elasticsearch.search.query.QuerySearchRequest;
@@ -210,7 +211,8 @@ public class TransportSearchDfsQueryThenFetchAction extends TransportSearchTypeA
             threadPool.executor(ThreadPool.Names.SEARCH).execute(new ActionRunnable<SearchResponse>(listener) {
                 @Override
                 public void doRun() throws IOException {
-                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
+                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults,
+                            fetchResults, request);
                     String scrollId = null;
                     if (request.scroll() != null) {
                         scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
@@ -81,7 +81,8 @@ public class TransportSearchQueryAndFetchAction extends TransportSearchTypeActio
                 public void doRun() throws IOException {
                     boolean useScroll = request.scroll() != null;
                     sortedShardList = searchPhaseController.sortDocs(useScroll, firstResults);
-                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults, firstResults);
+                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults,
+                            firstResults, request);
                     String scrollId = null;
                     if (request.scroll() != null) {
                         scrollId = buildScrollId(request.searchType(), firstResults, null);
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.search.type;
 
+import com.carrotsearch.hppc.IntArrayList;
 
 import org.apache.lucene.search.ScoreDoc;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionRunnable;
@@ -145,7 +146,8 @@ public class TransportSearchQueryThenFetchAction extends TransportSearchTypeActi
             threadPool.executor(ThreadPool.Names.SEARCH).execute(new ActionRunnable<SearchResponse>(listener) {
                 @Override
                 public void doRun() throws IOException {
-                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults, fetchResults);
+                    final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, firstResults,
+                            fetchResults, request);
                     String scrollId = null;
                     if (request.scroll() != null) {
                         scrollId = TransportSearchHelper.buildScrollId(request.searchType(), firstResults, null);
@@ -20,6 +20,7 @@
 package org.elasticsearch.action.search.type;
 
+import com.google.common.collect.ImmutableMap;
 
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
@@ -73,7 +74,8 @@ public class TransportSearchScanAction extends TransportSearchTypeAction {
 
             @Override
             protected void moveToSecondPhase() throws Exception {
-                final InternalSearchResponse internalResponse = searchPhaseController.merge(SearchPhaseController.EMPTY_DOCS, firstResults, (AtomicArray<? extends FetchSearchResultProvider>) AtomicArray.empty());
+                final InternalSearchResponse internalResponse = searchPhaseController.merge(SearchPhaseController.EMPTY_DOCS, firstResults,
+                        (AtomicArray<? extends FetchSearchResultProvider>) AtomicArray.empty(), request);
                 String scrollId = null;
                 if (request.scroll() != null) {
                     scrollId = buildScrollId(request.searchType(), firstResults, ImmutableMap.of("total_hits", Long.toString(internalResponse.hits().totalHits())));
@@ -21,7 +21,11 @@ package org.elasticsearch.action.search.type;
 
 import org.apache.lucene.search.ScoreDoc;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.search.*;
+import org.elasticsearch.action.search.ReduceSearchPhaseException;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
@@ -188,7 +192,8 @@ public class TransportSearchScrollQueryAndFetchAction extends AbstractComponent
 
         private void innerFinishHim() throws Exception {
             ScoreDoc[] sortedShardList = searchPhaseController.sortDocs(true, queryFetchResults);
-            final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults, queryFetchResults);
+            final InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryFetchResults,
+                    queryFetchResults, request);
             String scrollId = null;
             if (request.scroll() != null) {
                 scrollId = request.scrollId();
@@ -20,9 +20,14 @@
 package org.elasticsearch.action.search.type;
 
+import com.carrotsearch.hppc.IntArrayList;
 
 import org.apache.lucene.search.ScoreDoc;
 import org.elasticsearch.action.ActionListener;
-import org.elasticsearch.action.search.*;
+import org.elasticsearch.action.search.ReduceSearchPhaseException;
+import org.elasticsearch.action.search.SearchPhaseExecutionException;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.action.search.ShardSearchFailure;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.node.DiscoveryNodes;
@@ -239,7 +244,7 @@ public class TransportSearchScrollQueryThenFetchAction extends AbstractComponent
         }
 
         private void innerFinishHim() {
-            InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults);
+            InternalSearchResponse internalResponse = searchPhaseController.merge(sortedShardList, queryResults, fetchResults, request);
             String scrollId = null;
             if (request.scroll() != null) {
                 scrollId = request.scrollId();
@@ -212,7 +212,8 @@ public class TransportSearchScrollScanAction extends AbstractComponent {
                     docs.add(scoreDoc);
                 }
             }
-            final InternalSearchResponse internalResponse = searchPhaseController.merge(docs.toArray(new ScoreDoc[0]), queryFetchResults, queryFetchResults);
+            final InternalSearchResponse internalResponse = searchPhaseController.merge(docs.toArray(new ScoreDoc[0]), queryFetchResults,
+                    queryFetchResults, request);
             ((InternalSearchHits) internalResponse.hits()).totalHits = Long.parseLong(this.scrollId.getAttributes().get("total_hits"));
 
 
@@ -143,7 +143,7 @@ public class TransportSuggestAction extends TransportBroadcastAction<SuggestRequ
                     throw new IllegalArgumentException("suggest content missing");
                 }
                 final SuggestionSearchContext context = suggestPhase.parseElement().parseInternal(parser, indexService.mapperService(),
-                        indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id());
+                        indexService.queryParserService(), request.shardId().getIndex(), request.shardId().id(), request);
                 final Suggest result = suggestPhase.execute(context, searcher.searcher());
                 return new ShardSuggestResponse(request.shardId(), result);
             }
@@ -246,7 +246,7 @@ public class UpdateHelper extends AbstractComponent {
     private Map<String, Object> executeScript(UpdateRequest request, Map<String, Object> ctx) {
         try {
             if (scriptService != null) {
-                ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE);
+                ExecutableScript script = scriptService.executable(request.script, ScriptContext.Standard.UPDATE, request);
                 script.setNextVar("ctx", ctx);
                 script.run();
                 // we need to unwrap the ctx...
@@ -0,0 +1,112 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.common;
+
+import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
+
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+
+import java.util.Set;
+
+public class DelegatingHasContextAndHeaders implements HasContextAndHeaders {
+
+    private HasContextAndHeaders delegate;
+
+    public DelegatingHasContextAndHeaders(HasContextAndHeaders delegate) {
+        this.delegate = delegate;
+    }
+
+    @Override
+    public <V> void putHeader(String key, V value) {
+        delegate.putHeader(key, value);
+    }
+
+    @Override
+    public void copyContextAndHeadersFrom(HasContextAndHeaders other) {
+        delegate.copyContextAndHeadersFrom(other);
+    }
+
+    @Override
+    public <V> V getHeader(String key) {
+        return delegate.getHeader(key);
+    }
+
+    @Override
+    public boolean hasHeader(String key) {
+        return delegate.hasHeader(key);
+    }
+
+    @Override
+    public <V> V putInContext(Object key, Object value) {
+        return delegate.putInContext(key, value);
+    }
+
+    @Override
+    public Set<String> getHeaders() {
+        return delegate.getHeaders();
+    }
+
+    @Override
+    public void copyHeadersFrom(HasHeaders from) {
+        delegate.copyHeadersFrom(from);
+    }
+
+    @Override
+    public void putAllInContext(ObjectObjectAssociativeContainer<Object, Object> map) {
+        delegate.putAllInContext(map);
+    }
+
+    @Override
+    public <V> V getFromContext(Object key) {
+        return delegate.getFromContext(key);
+    }
+
+    @Override
+    public <V> V getFromContext(Object key, V defaultValue) {
+        return delegate.getFromContext(key, defaultValue);
+    }
+
+    @Override
+    public boolean hasInContext(Object key) {
+        return delegate.hasInContext(key);
+    }
+
+    @Override
+    public int contextSize() {
+        return delegate.contextSize();
+    }
+
+    @Override
+    public boolean isContextEmpty() {
+        return delegate.isContextEmpty();
+    }
+
+    @Override
+    public ImmutableOpenMap<Object, Object> getContext() {
+        return delegate.getContext();
+    }
+
+    @Override
+    public void copyContextFrom(HasContext other) {
+        delegate.copyContextFrom(other);
+    }
+
+}
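A brief usage sketch (illustrative only; `HeaderAwareContext` is an invented name): instead of re-implementing all of `HasContextAndHeaders`, a class can extend this wrapper and forward everything to the delegate that seeded it, which is exactly how `InternalAggregation.ReduceContext` is changed further down in this commit.

    // Hypothetical subclass, for illustration; compare ReduceContext below.
    public class HeaderAwareContext extends DelegatingHasContextAndHeaders {
        public HeaderAwareContext(HasContextAndHeaders headersContext) {
            super(headersContext); // every context/header call is forwarded to the delegate
        }
    }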
@@ -23,6 +23,7 @@ import com.google.common.base.Function;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.Maps;
+
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.DocIdSet;
 import org.apache.lucene.search.DocIdSetIterator;
@@ -199,7 +200,7 @@ public class DocumentMapper implements ToXContent {
         List<FieldMapper> newFieldMappers = new ArrayList<>();
         for (MetadataFieldMapper metadataMapper : this.mapping.metadataMappers) {
             if (metadataMapper instanceof FieldMapper) {
-                newFieldMappers.add((FieldMapper) metadataMapper);
+                newFieldMappers.add(metadataMapper);
             }
         }
         MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers);
@@ -452,7 +453,7 @@ public class DocumentMapper implements ToXContent {
     public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
         try {
             // We use the ctx variable and the _source name to be consistent with the update api.
-            ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.MAPPING);
+            ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.MAPPING, null);
             Map<String, Object> ctx = new HashMap<>(1);
             ctx.put("_source", sourceAsMap);
             executable.setNextVar("ctx", ctx);
@@ -29,6 +29,7 @@ import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptContext;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.script.Template;
+import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 import java.util.HashMap;
@@ -68,7 +69,7 @@ public class TemplateQueryParser implements QueryParser {
      * Parses the template query replacing template parameters with provided
      * values. Handles both submitting the template as part of the request as
      * well as referencing only the template name.
-     *
+     *
      * @param parseContext
      *            parse context containing the templated query.
      */
@@ -77,7 +78,7 @@ public class TemplateQueryParser implements QueryParser {
     public Query parse(QueryParseContext parseContext) throws IOException {
         XContentParser parser = parseContext.parser();
         Template template = parse(parser, parseContext.parseFieldMatcher());
-        ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
+        ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH, SearchContext.current());
 
         BytesReference querySource = (BytesReference) executable.run();
 
@@ -32,7 +32,10 @@ import org.apache.lucene.util.Counter;
 import org.elasticsearch.action.percolate.PercolateShardRequest;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cache.recycler.PageCacheRecycler;
-import org.elasticsearch.common.*;
+import org.elasticsearch.common.HasContext;
+import org.elasticsearch.common.HasContextAndHeaders;
+import org.elasticsearch.common.HasHeaders;
+import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
 import org.elasticsearch.common.lease.Releasables;
 import org.elasticsearch.common.text.StringText;
@@ -75,7 +78,11 @@ import org.elasticsearch.search.rescore.RescoreSearchContext;
 import org.elasticsearch.search.scan.ScanContext;
 import org.elasticsearch.search.suggest.SuggestionSearchContext;
 
-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
 import java.util.concurrent.ConcurrentMap;
 
 /**
@@ -121,7 +128,7 @@ public class PercolateContext extends SearchContext {
     public PercolateContext(PercolateShardRequest request, SearchShardTarget searchShardTarget, IndexShard indexShard,
                             IndexService indexService, PageCacheRecycler pageCacheRecycler,
                             BigArrays bigArrays, ScriptService scriptService, Query aliasFilter, ParseFieldMatcher parseFieldMatcher) {
-        super(parseFieldMatcher);
+        super(parseFieldMatcher, request);
         this.indexShard = indexShard;
         this.indexService = indexService;
         this.fieldDataService = indexService.fieldData();
@@ -19,6 +19,7 @@
 package org.elasticsearch.percolator;
 
+import com.carrotsearch.hppc.IntObjectHashMap;
 
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.ReaderUtil;
 import org.apache.lucene.index.memory.ExtendedMemoryIndex;
@@ -40,6 +41,7 @@ import org.elasticsearch.cache.recycler.PageCacheRecycler;
 import org.elasticsearch.cluster.ClusterService;
 import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.common.HasContextAndHeaders;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
@@ -63,9 +65,11 @@ import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.fielddata.IndexFieldData;
 import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
-import org.elasticsearch.index.mapper.*;
+import org.elasticsearch.index.mapper.DocumentMapperForType;
+import org.elasticsearch.index.mapper.MappedFieldType;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.Mapping;
+import org.elasticsearch.index.mapper.ParsedDocument;
+import org.elasticsearch.index.mapper.Uid;
 import org.elasticsearch.index.mapper.internal.UidFieldMapper;
@@ -73,7 +77,10 @@ import org.elasticsearch.index.percolator.stats.ShardPercolateService;
 import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.indices.IndicesService;
-import org.elasticsearch.percolator.QueryCollector.*;
+import org.elasticsearch.percolator.QueryCollector.Count;
+import org.elasticsearch.percolator.QueryCollector.Match;
+import org.elasticsearch.percolator.QueryCollector.MatchAndScore;
+import org.elasticsearch.percolator.QueryCollector.MatchAndSort;
 import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchParseElement;
 import org.elasticsearch.search.SearchShardTarget;
@@ -95,7 +102,9 @@ import java.util.Map;
 
 import static org.elasticsearch.common.util.CollectionUtils.eagerTransform;
 import static org.elasticsearch.index.mapper.SourceToParse.source;
-import static org.elasticsearch.percolator.QueryCollector.*;
+import static org.elasticsearch.percolator.QueryCollector.count;
+import static org.elasticsearch.percolator.QueryCollector.match;
+import static org.elasticsearch.percolator.QueryCollector.matchAndScore;
 
 public class PercolatorService extends AbstractComponent {
 
@@ -162,9 +171,9 @@ public class PercolatorService extends AbstractComponent {
     }
 
 
-    public ReduceResult reduce(byte percolatorTypeId, List<PercolateShardResponse> shardResults) {
+    public ReduceResult reduce(byte percolatorTypeId, List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
         PercolatorType percolatorType = percolatorTypes.get(percolatorTypeId);
-        return percolatorType.reduce(shardResults);
+        return percolatorType.reduce(shardResults, headersContext);
     }
 
     public PercolateShardResponse percolate(PercolateShardRequest request) {
@@ -423,7 +432,7 @@ public class PercolatorService extends AbstractComponent {
         // 0x00 is reserved for empty type.
         byte id();
 
-        ReduceResult reduce(List<PercolateShardResponse> shardResults);
+        ReduceResult reduce(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext);
 
         PercolateShardResponse doPercolate(PercolateShardRequest request, PercolateContext context, boolean isNested);
 
@@ -437,14 +446,14 @@ public class PercolatorService extends AbstractComponent {
         }
 
         @Override
-        public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
+        public ReduceResult reduce(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
             long finalCount = 0;
             for (PercolateShardResponse shardResponse : shardResults) {
                 finalCount += shardResponse.count();
             }
 
             assert !shardResults.isEmpty();
-            InternalAggregations reducedAggregations = reduceAggregations(shardResults);
+            InternalAggregations reducedAggregations = reduceAggregations(shardResults, headersContext);
             return new ReduceResult(finalCount, reducedAggregations);
         }
 
@@ -481,8 +490,8 @@ public class PercolatorService extends AbstractComponent {
         }
 
         @Override
-        public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
-            return countPercolator.reduce(shardResults);
+        public ReduceResult reduce(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
+            return countPercolator.reduce(shardResults, headersContext);
         }
 
         @Override
@@ -511,7 +520,7 @@ public class PercolatorService extends AbstractComponent {
         }
 
         @Override
-        public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
+        public ReduceResult reduce(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
             long foundMatches = 0;
             int numMatches = 0;
             for (PercolateShardResponse response : shardResults) {
@@ -537,7 +546,7 @@ public class PercolatorService extends AbstractComponent {
             }
 
             assert !shardResults.isEmpty();
-            InternalAggregations reducedAggregations = reduceAggregations(shardResults);
+            InternalAggregations reducedAggregations = reduceAggregations(shardResults, headersContext);
             return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedAggregations);
         }
 
@@ -589,8 +598,8 @@ public class PercolatorService extends AbstractComponent {
         }
 
         @Override
-        public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
-            return matchPercolator.reduce(shardResults);
+        public ReduceResult reduce(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
+            return matchPercolator.reduce(shardResults, headersContext);
         }
 
         @Override
@@ -622,8 +631,8 @@ public class PercolatorService extends AbstractComponent {
         }
 
         @Override
-        public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
-            return matchPercolator.reduce(shardResults);
+        public ReduceResult reduce(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
+            return matchPercolator.reduce(shardResults, headersContext);
         }
 
         @Override
@@ -656,7 +665,7 @@ public class PercolatorService extends AbstractComponent {
         }
 
         @Override
-        public ReduceResult reduce(List<PercolateShardResponse> shardResults) {
+        public ReduceResult reduce(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
             long foundMatches = 0;
             int nonEmptyResponses = 0;
             int firstNonEmptyIndex = 0;
@@ -735,7 +744,7 @@ public class PercolatorService extends AbstractComponent {
             }
 
             assert !shardResults.isEmpty();
-            InternalAggregations reducedAggregations = reduceAggregations(shardResults);
+            InternalAggregations reducedAggregations = reduceAggregations(shardResults, headersContext);
             return new ReduceResult(foundMatches, finalMatches.toArray(new PercolateResponse.Match[finalMatches.size()]), reducedAggregations);
         }
 
@@ -843,7 +852,7 @@ public class PercolatorService extends AbstractComponent {
            }
         }
 
-    private InternalAggregations reduceAggregations(List<PercolateShardResponse> shardResults) {
+    private InternalAggregations reduceAggregations(List<PercolateShardResponse> shardResults, HasContextAndHeaders headersContext) {
         if (shardResults.get(0).aggregations() == null) {
             return null;
         }
@@ -852,14 +861,15 @@ public class PercolatorService extends AbstractComponent {
         for (PercolateShardResponse shardResult : shardResults) {
             aggregationsList.add(shardResult.aggregations());
         }
-        InternalAggregations aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService));
+        InternalAggregations aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService,
+                headersContext));
         if (aggregations != null) {
             List<SiblingPipelineAggregator> pipelineAggregators = shardResults.get(0).pipelineAggregators();
             if (pipelineAggregators != null) {
                 List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION));
                 for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
-                    InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(bigArrays,
-                            scriptService));
+                    InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(
+                            bigArrays, scriptService, headersContext));
                     newAggs.add(newAgg);
                 }
                 aggregations = new InternalAggregations(newAggs);
@@ -25,6 +25,7 @@ import com.google.common.cache.CacheBuilder;
 import com.google.common.cache.RemovalListener;
 import com.google.common.cache.RemovalNotification;
 import com.google.common.collect.ImmutableMap;
+
 import org.apache.lucene.util.IOUtils;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.delete.DeleteRequest;
@@ -37,6 +38,7 @@ import org.elasticsearch.action.indexedscripts.delete.DeleteIndexedScriptRequest
 import org.elasticsearch.action.indexedscripts.get.GetIndexedScriptRequest;
 import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest;
 import org.elasticsearch.client.Client;
+import org.elasticsearch.common.HasContextAndHeaders;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.Strings;
@@ -114,21 +116,25 @@ public class ScriptService extends AbstractComponent implements Closeable {
      * @deprecated Use {@link org.elasticsearch.script.Script.ScriptField} instead. This should be removed in
      *             2.0
      */
+    @Deprecated
     public static final ParseField SCRIPT_LANG = new ParseField("lang","script_lang");
     /**
      * @deprecated Use {@link ScriptType#getParseField()} instead. This should
      *             be removed in 2.0
      */
+    @Deprecated
     public static final ParseField SCRIPT_FILE = new ParseField("script_file");
     /**
      * @deprecated Use {@link ScriptType#getParseField()} instead. This should
      *             be removed in 2.0
      */
+    @Deprecated
     public static final ParseField SCRIPT_ID = new ParseField("script_id");
     /**
      * @deprecated Use {@link ScriptType#getParseField()} instead. This should
      *             be removed in 2.0
      */
+    @Deprecated
     public static final ParseField SCRIPT_INLINE = new ParseField("script");
 
     @Inject
@@ -220,7 +226,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
     /**
      * Checks if a script can be executed and compiles it if needed, or returns the previously compiled and cached script.
      */
-    public CompiledScript compile(Script script, ScriptContext scriptContext) {
+    public CompiledScript compile(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) {
         if (script == null) {
             throw new IllegalArgumentException("The parameter script (Script) must not be null.");
         }
@@ -248,14 +254,14 @@ public class ScriptService extends AbstractComponent implements Closeable {
                     " operation [" + scriptContext.getKey() + "] and lang [" + lang + "] are not supported");
         }
 
-        return compileInternal(script);
+        return compileInternal(script, headersContext);
     }
 
     /**
      * Compiles a script straight-away, or returns the previously compiled and cached script,
      * without checking if it can be executed based on settings.
     */
-    public CompiledScript compileInternal(Script script) {
+    public CompiledScript compileInternal(Script script, HasContextAndHeaders context) {
         if (script == null) {
             throw new IllegalArgumentException("The parameter script (Script) must not be null.");
         }
@@ -292,7 +298,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
             //the script has been updated in the index since the last look up.
             final IndexedScript indexedScript = new IndexedScript(lang, name);
             name = indexedScript.id;
-            code = getScriptFromIndex(indexedScript.lang, indexedScript.id);
+            code = getScriptFromIndex(indexedScript.lang, indexedScript.id, context);
         }
 
         String cacheKey = getCacheKey(scriptEngineService, type == ScriptType.INLINE ? null : name, code);
@@ -333,13 +339,13 @@ public class ScriptService extends AbstractComponent implements Closeable {
         return scriptLang;
     }
 
-    String getScriptFromIndex(String scriptLang, String id) {
+    String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders context) {
         if (client == null) {
             throw new IllegalArgumentException("Got an indexed script with no Client registered.");
         }
         scriptLang = validateScriptLanguage(scriptLang);
         GetRequest getRequest = new GetRequest(SCRIPT_INDEX, scriptLang, id);
-        getRequest.copyContextAndHeadersFrom(SearchContext.current());
+        getRequest.copyContextAndHeadersFrom(context);
         GetResponse responseFields = client.get(getRequest).actionGet();
         if (responseFields.isExists()) {
             return getScriptFromResponse(responseFields);
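The behaviour the two hunks above enable, as an illustrative sketch (the header name and values are invented; `putHeader`/`getHeader` are the `HasContextAndHeaders` accessors used throughout this commit):

    // A header set on the original request is now visible on the GetRequest
    // that loads the indexed script, because the caller's context is copied
    // instead of whatever SearchContext happens to be current.
    SearchRequest original = new SearchRequest("my-index");
    original.putHeader("X-Caller-Header", "value");

    GetRequest getRequest = new GetRequest(ScriptService.SCRIPT_INDEX, "mustache", "my-template");
    getRequest.copyContextAndHeadersFrom(original);
    assert "value".equals(getRequest.getHeader("X-Caller-Header"));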
@@ -432,8 +438,8 @@ public class ScriptService extends AbstractComponent implements Closeable {
     /**
      * Compiles (or retrieves from cache) and executes the provided script
     */
-    public ExecutableScript executable(Script script, ScriptContext scriptContext) {
-        return executable(compile(script, scriptContext), script.getParams());
+    public ExecutableScript executable(Script script, ScriptContext scriptContext, HasContextAndHeaders headersContext) {
+        return executable(compile(script, scriptContext, headersContext), script.getParams());
     }
 
     /**
@@ -447,7 +453,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
      * Compiles (or retrieves from cache) and executes the provided search script
     */
    public SearchScript search(SearchLookup lookup, Script script, ScriptContext scriptContext) {
-        CompiledScript compiledScript = compile(script, scriptContext);
+        CompiledScript compiledScript = compile(script, scriptContext, SearchContext.current());
         return getScriptEngineServiceForLang(compiledScript.lang()).search(compiledScript, lookup, script.getParams());
     }
 
@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.ObjectSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.google.common.collect.ImmutableMap;
+
 import org.apache.lucene.index.IndexOptions;
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.index.NumericDocValues;
@@ -82,10 +83,23 @@ import org.elasticsearch.script.Template;
 import org.elasticsearch.script.mustache.MustacheScriptEngineService;
 import org.elasticsearch.search.dfs.DfsPhase;
 import org.elasticsearch.search.dfs.DfsSearchResult;
-import org.elasticsearch.search.fetch.*;
-import org.elasticsearch.search.internal.*;
+import org.elasticsearch.search.fetch.FetchPhase;
+import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.search.fetch.QueryFetchSearchResult;
+import org.elasticsearch.search.fetch.ScrollQueryFetchSearchResult;
+import org.elasticsearch.search.fetch.ShardFetchRequest;
+import org.elasticsearch.search.internal.DefaultSearchContext;
+import org.elasticsearch.search.internal.InternalScrollSearchRequest;
+import org.elasticsearch.search.internal.ScrollContext;
+import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.SearchContext.Lifetime;
-import org.elasticsearch.search.query.*;
+import org.elasticsearch.search.internal.ShardSearchLocalRequest;
+import org.elasticsearch.search.internal.ShardSearchRequest;
+import org.elasticsearch.search.query.QueryPhase;
+import org.elasticsearch.search.query.QuerySearchRequest;
+import org.elasticsearch.search.query.QuerySearchResult;
+import org.elasticsearch.search.query.QuerySearchResultProvider;
+import org.elasticsearch.search.query.ScrollQuerySearchResult;
 import org.elasticsearch.search.warmer.IndexWarmersMetaData;
 import org.elasticsearch.threadpool.ThreadPool;
 
@@ -736,7 +750,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
 
         BytesReference processedQuery;
         if (request.template() != null) {
-            ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH);
+            ExecutableScript executable = this.scriptService.executable(request.template(), ScriptContext.Standard.SEARCH, searchContext);
             processedQuery = (BytesReference) executable.run();
         } else {
             if (!hasLength(request.templateSource())) {
@@ -753,7 +767,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
                 //Try to double parse for nested template id/file
                 parser = null;
                 try {
-                    ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
+                    ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH, searchContext);
                     processedQuery = (BytesReference) executable.run();
                     parser = XContentFactory.xContent(processedQuery).createParser(processedQuery);
                 } catch (ElasticsearchParseException epe) {
@@ -761,7 +775,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
                     //for backwards compatibility and keep going
                     template = new Template(template.getScript(), ScriptService.ScriptType.FILE, MustacheScriptEngineService.NAME,
                             null, template.getParams());
-                    ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
+                    ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH, searchContext);
                     processedQuery = (BytesReference) executable.run();
                 }
                 if (parser != null) {
@@ -771,7 +785,8 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
                         //An inner template referring to a filename or id
                         template = new Template(innerTemplate.getScript(), innerTemplate.getType(),
                                 MustacheScriptEngineService.NAME, null, template.getParams());
-                        ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
+                        ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH,
+                                searchContext);
                         processedQuery = (BytesReference) executable.run();
                     }
                 } catch (ScriptParseException e) {
@@ -779,7 +794,7 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> {
                     }
                 }
             } else {
-                ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH);
+                ExecutableScript executable = this.scriptService.executable(template, ScriptContext.Standard.SEARCH, searchContext);
                 processedQuery = (BytesReference) executable.run();
             }
         } catch (IOException e) {
@@ -18,6 +18,8 @@
  */
 package org.elasticsearch.search.aggregations;
 
+import org.elasticsearch.common.DelegatingHasContextAndHeaders;
+import org.elasticsearch.common.HasContextAndHeaders;
 import org.elasticsearch.common.bytes.BytesArray;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
@@ -90,12 +92,13 @@ public abstract class InternalAggregation implements Aggregation, ToXContent, St
         }
     }
 
-    public static class ReduceContext {
+    public static class ReduceContext extends DelegatingHasContextAndHeaders {
 
         private final BigArrays bigArrays;
         private ScriptService scriptService;
 
-        public ReduceContext(BigArrays bigArrays, ScriptService scriptService) {
+        public ReduceContext(BigArrays bigArrays, ScriptService scriptService, HasContextAndHeaders headersContext) {
+            super(headersContext);
             this.bigArrays = bigArrays;
             this.scriptService = scriptService;
         }
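Constructing the extended context, as a small sketch (the `originalRequest` variable is illustrative; any `HasContextAndHeaders`, such as the triggering request, can seed it):

    // Scripts executed during aggregation reduction can now see the caller's
    // headers, because the ReduceContext carries them via its delegate.
    InternalAggregation.ReduceContext reduceContext =
            new InternalAggregation.ReduceContext(bigArrays, scriptService, originalRequest);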
@@ -60,11 +60,11 @@ public class SignificantTermsParametersParser extends AbstractTermsParametersPar
 
     @Override
     public void parseSpecial(String aggregationName, XContentParser parser, SearchContext context, XContentParser.Token token, String currentFieldName) throws IOException {
 
 
         if (token == XContentParser.Token.START_OBJECT) {
             SignificanceHeuristicParser significanceHeuristicParser = significanceHeuristicParserMapper.get(currentFieldName);
             if (significanceHeuristicParser != null) {
-                significanceHeuristic = significanceHeuristicParser.parse(parser, context.parseFieldMatcher());
+                significanceHeuristic = significanceHeuristicParser.parse(parser, context.parseFieldMatcher(), context);
             } else if (context.parseFieldMatcher().match(currentFieldName, BACKGROUND_FILTER)) {
                 filter = context.queryParserService().parseInnerFilter(parser).query();
             } else {
@@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 
@@ -115,7 +116,8 @@ public class GND extends NXYSignificanceHeuristic {
         }
 
         @Override
-        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
+        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
+                throws IOException, QueryParsingException {
             String givenName = parser.currentName();
             boolean backgroundIsSuperset = true;
             XContentParser.Token token = parser.nextToken();
@@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 
@@ -108,7 +109,8 @@ public class JLHScore extends SignificanceHeuristic {
     public static class JLHScoreParser implements SignificanceHeuristicParser {
 
         @Override
-        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
+        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
+                throws IOException, QueryParsingException {
             // move to the closing bracket
             if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
                 throw new ElasticsearchParseException("failed to parse [jhl] significance heuristic. expected an empty object, but found [{}] instead", parser.currentToken());
@@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 
@@ -138,7 +139,8 @@ public abstract class NXYSignificanceHeuristic extends SignificanceHeuristic {
     public static abstract class NXYParser implements SignificanceHeuristicParser {
 
         @Override
-        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
+        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
+                throws IOException, QueryParsingException {
             String givenName = parser.currentName();
             boolean includeNegatives = false;
             boolean backgroundIsSuperset = true;
 
@@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 
@@ -57,7 +58,7 @@ public class PercentageScore extends SignificanceHeuristic {
 
     /**
      * Indicates the significance of a term in a sample by determining what percentage
-     * of all occurrences of a term are found in the sample.
+     * of all occurrences of a term are found in the sample.
      */
     @Override
     public double getScore(long subsetFreq, long subsetSize, long supersetFreq, long supersetSize) {
@@ -65,7 +66,7 @@ public class PercentageScore extends SignificanceHeuristic {
         if (supersetFreq == 0) {
             // avoid a divide by zero issue
             return 0;
-        }
+        }
         return (double) subsetFreq / (double) supersetFreq;
     }
 
@@ -77,7 +78,8 @@ public class PercentageScore extends SignificanceHeuristic {
     public static class PercentageScoreParser implements SignificanceHeuristicParser {
 
         @Override
-        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
+        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
+                throws IOException, QueryParsingException {
             // move to the closing bracket
             if (!parser.nextToken().equals(XContentParser.Token.END_OBJECT)) {
                 throw new ElasticsearchParseException("failed to parse [percentage] significance heuristic. expected an empty object, but got [{}] instead", parser.currentToken());
@@ -24,17 +24,21 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.inject.Inject;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
-import org.elasticsearch.script.*;
+import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.Script;
+import org.elasticsearch.script.Script.ScriptField;
+import org.elasticsearch.script.ScriptContext;
+import org.elasticsearch.script.ScriptParameterParser;
+import org.elasticsearch.script.ScriptParameterParser.ScriptParameterValue;
+import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 import java.util.Map;
@@ -81,8 +85,9 @@ public class ScriptHeuristic extends SignificanceHeuristic {
 
    }
 
     @Override
     public void initialize(InternalAggregation.ReduceContext context) {
-        searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS);
+        searchScript = context.scriptService().executable(script, ScriptContext.Standard.AGGS, context);
         searchScript.setNextVar("_subset_freq", subsetDfHolder);
         searchScript.setNextVar("_subset_size", subsetSizeHolder);
         searchScript.setNextVar("_superset_freq", supersetDfHolder);
@@ -129,7 +134,8 @@ public class ScriptHeuristic extends SignificanceHeuristic {
         }
 
         @Override
-        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
+        public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
+                throws IOException, QueryParsingException {
             String heuristicName = parser.currentName();
             Script script = null;
             XContentParser.Token token;
@@ -169,7 +175,7 @@ public class ScriptHeuristic extends SignificanceHeuristic {
             }
             ExecutableScript searchScript;
             try {
-                searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS);
+                searchScript = scriptService.executable(script, ScriptContext.Standard.AGGS, context);
             } catch (Exception e) {
                 throw new ElasticsearchParseException("failed to parse [{}] significance heuristic. the script [{}] could not be loaded", e, script, heuristicName);
             }
@@ -204,21 +210,23 @@ public class ScriptHeuristic extends SignificanceHeuristic {
 
     public final class LongAccessor extends Number {
         public long value;
         @Override
         public int intValue() {
             return (int)value;
         }
         @Override
         public long longValue() {
             return value;
         }
 
+        @Override
         public float floatValue() {
-            return (float)value;
+            return value;
         }
 
+        @Override
         public double doubleValue() {
-            return (double)value;
+            return value;
         }
 
         @Override
@@ -23,12 +23,14 @@ package org.elasticsearch.search.aggregations.bucket.significant.heuristics;
 import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.index.query.QueryParsingException;
+import org.elasticsearch.search.internal.SearchContext;
 
 import java.io.IOException;
 
 public interface SignificanceHeuristicParser {
 
-    SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException;
+    SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context) throws IOException,
+            QueryParsingException;
 
     String[] getNames();
 }
@@ -91,7 +91,7 @@ public class InternalScriptedMetric extends InternalMetricsAggregation implement
                 vars.putAll(firstAggregation.reduceScript.getParams());
             }
             CompiledScript compiledScript = reduceContext.scriptService().compile(firstAggregation.reduceScript,
-                    ScriptContext.Standard.AGGS);
+                    ScriptContext.Standard.AGGS, reduceContext);
             ExecutableScript script = reduceContext.scriptService().executable(compiledScript, vars);
             aggregation = script.run();
         } else {
@@ -58,11 +58,11 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
         this.params = params;
         ScriptService scriptService = context.searchContext().scriptService();
         if (initScript != null) {
-            scriptService.executable(initScript, ScriptContext.Standard.AGGS).run();
+            scriptService.executable(initScript, ScriptContext.Standard.AGGS, context.searchContext()).run();
         }
         this.mapScript = scriptService.search(context.searchContext().lookup(), mapScript, ScriptContext.Standard.AGGS);
         if (combineScript != null) {
-            this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS);
+            this.combineScript = scriptService.executable(combineScript, ScriptContext.Standard.AGGS, context.searchContext());
         } else {
             this.combineScript = null;
         }
@@ -159,7 +159,7 @@ public class ScriptedMetricAggregator extends MetricsAggregator {
                 return null;
             }
         }
-
+
     @SuppressWarnings({ "unchecked" })
     private static <T> T deepCopyParams(T original, SearchContext context) {
         T clone;
@@ -104,7 +104,7 @@ public class BucketScriptPipelineAggregator extends PipelineAggregator {
         InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
         List<? extends Bucket> buckets = originalAgg.getBuckets();
 
-        CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS);
+        CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext);
         List newBuckets = new ArrayList<>();
         for (Bucket bucket : buckets) {
             Map<String, Object> vars = new HashMap<>();
@@ -98,7 +98,7 @@ public class BucketSelectorPipelineAggregator extends PipelineAggregator {
         InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket> originalAgg = (InternalMultiBucketAggregation<InternalMultiBucketAggregation, InternalMultiBucketAggregation.InternalBucket>) aggregation;
         List<? extends Bucket> buckets = originalAgg.getBuckets();
 
-        CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS);
+        CompiledScript compiledScript = reduceContext.scriptService().compile(script, ScriptContext.Standard.AGGS, reduceContext);
         List newBuckets = new ArrayList<>();
         for (Bucket bucket : buckets) {
             Map<String, Object> vars = new HashMap<>();
@@ -31,6 +31,7 @@ import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.search.TopFieldDocs;
 import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.common.HasContextAndHeaders;
 import org.elasticsearch.common.collect.HppcMaps;
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.inject.Inject;
@ -296,7 +297,8 @@ public class SearchPhaseController extends AbstractComponent {
|
||||
}
|
||||
}
|
||||
|
||||
public InternalSearchResponse merge(ScoreDoc[] sortedDocs, AtomicArray<? extends QuerySearchResultProvider> queryResultsArr, AtomicArray<? extends FetchSearchResultProvider> fetchResultsArr) {
|
||||
public InternalSearchResponse merge(ScoreDoc[] sortedDocs, AtomicArray<? extends QuerySearchResultProvider> queryResultsArr,
|
||||
AtomicArray<? extends FetchSearchResultProvider> fetchResultsArr, HasContextAndHeaders headersContext) {
|
||||
|
||||
List<? extends AtomicArray.Entry<? extends QuerySearchResultProvider>> queryResults = queryResultsArr.asList();
|
||||
List<? extends AtomicArray.Entry<? extends FetchSearchResultProvider>> fetchResults = fetchResultsArr.asList();
|
||||
@ -404,7 +406,7 @@ public class SearchPhaseController extends AbstractComponent {
|
||||
for (AtomicArray.Entry<? extends QuerySearchResultProvider> entry : queryResults) {
|
||||
aggregationsList.add((InternalAggregations) entry.value.queryResult().aggregations());
|
||||
}
|
||||
aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService));
|
||||
aggregations = InternalAggregations.reduce(aggregationsList, new ReduceContext(bigArrays, scriptService, headersContext));
|
||||
}
|
||||
}
|
||||
|
||||
@ -413,8 +415,8 @@ public class SearchPhaseController extends AbstractComponent {
|
||||
if (pipelineAggregators != null) {
|
||||
List<InternalAggregation> newAggs = new ArrayList<>(eagerTransform(aggregations.asList(), PipelineAggregator.AGGREGATION_TRANFORM_FUNCTION));
|
||||
for (SiblingPipelineAggregator pipelineAggregator : pipelineAggregators) {
|
||||
InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(bigArrays,
|
||||
scriptService));
|
||||
InternalAggregation newAgg = pipelineAggregator.doReduce(new InternalAggregations(newAggs), new ReduceContext(
|
||||
bigArrays, scriptService, headersContext));
|
||||
newAggs.add(newAgg);
|
||||
}
|
||||
aggregations = new InternalAggregations(newAggs);
|
||||
|
@ -19,7 +19,6 @@

package org.elasticsearch.search.internal;

import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;
import org.apache.lucene.search.BooleanClause.Occur;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Collector;
@ -30,12 +29,8 @@ import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.HasContext;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.HasHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.lucene.search.function.BoostScoreFunction;
@ -78,7 +73,6 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
*
@ -146,7 +140,7 @@ public class DefaultSearchContext extends SearchContext {
BigArrays bigArrays, Counter timeEstimateCounter, ParseFieldMatcher parseFieldMatcher,
TimeValue timeout
) {
super(parseFieldMatcher);
super(parseFieldMatcher, request);
this.id = id;
this.request = request;
this.searchType = request.searchType();
@ -724,81 +718,6 @@ public class DefaultSearchContext extends SearchContext {
return innerHitsContext;
}

@Override
public <V> V putInContext(Object key, Object value) {
return request.putInContext(key, value);
}

@Override
public void putAllInContext(ObjectObjectAssociativeContainer<Object, Object> map) {
request.putAllInContext(map);
}

@Override
public <V> V getFromContext(Object key) {
return request.getFromContext(key);
}

@Override
public <V> V getFromContext(Object key, V defaultValue) {
return request.getFromContext(key, defaultValue);
}

@Override
public boolean hasInContext(Object key) {
return request.hasInContext(key);
}

@Override
public int contextSize() {
return request.contextSize();
}

@Override
public boolean isContextEmpty() {
return request.isContextEmpty();
}

@Override
public ImmutableOpenMap<Object, Object> getContext() {
return request.getContext();
}

@Override
public void copyContextFrom(HasContext other) {
request.copyContextFrom(other);
}

@Override
public <V> void putHeader(String key, V value) {
request.putHeader(key, value);
}

@Override
public <V> V getHeader(String key) {
return request.getHeader(key);
}

@Override
public boolean hasHeader(String key) {
return request.hasHeader(key);
}

@Override
public Set<String> getHeaders() {
return request.getHeaders();
}

@Override
public void copyHeadersFrom(HasHeaders from) {
request.copyHeadersFrom(from);
}

@Override
public void copyContextAndHeadersFrom(HasContextAndHeaders other) {
request.copyContextAndHeadersFrom(other);
}

@Override
public Map<Class<?>, Collector> queryCollectors() {
return queryCollectors;

@ -19,16 +19,13 @@

package org.elasticsearch.search.internal;

import com.carrotsearch.hppc.ObjectObjectAssociativeContainer;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.*;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.index.analysis.AnalysisService;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
@ -59,7 +56,6 @@ import org.elasticsearch.search.suggest.SuggestionSearchContext;

import java.util.List;
import java.util.Map;
import java.util.Set;

public abstract class FilteredSearchContext extends SearchContext {

@ -67,7 +63,7 @@ public abstract class FilteredSearchContext extends SearchContext {

public FilteredSearchContext(SearchContext in) {
//inner_hits in percolator ends up with null inner search context
super(in == null ? ParseFieldMatcher.EMPTY : in.parseFieldMatcher());
super(in == null ? ParseFieldMatcher.EMPTY : in.parseFieldMatcher(), in);
this.in = in;
}

@ -526,81 +522,6 @@ public abstract class FilteredSearchContext extends SearchContext {
return in.timeEstimateCounter();
}

@Override
public <V> V putInContext(Object key, Object value) {
return in.putInContext(key, value);
}

@Override
public void putAllInContext(ObjectObjectAssociativeContainer<Object, Object> map) {
in.putAllInContext(map);
}

@Override
public <V> V getFromContext(Object key) {
return in.getFromContext(key);
}

@Override
public <V> V getFromContext(Object key, V defaultValue) {
return in.getFromContext(key, defaultValue);
}

@Override
public boolean hasInContext(Object key) {
return in.hasInContext(key);
}

@Override
public int contextSize() {
return in.contextSize();
}

@Override
public boolean isContextEmpty() {
return in.isContextEmpty();
}

@Override
public ImmutableOpenMap<Object, Object> getContext() {
return in.getContext();
}

@Override
public void copyContextFrom(HasContext other) {
in.copyContextFrom(other);
}

@Override
public <V> void putHeader(String key, V value) {
in.putHeader(key, value);
}

@Override
public <V> V getHeader(String key) {
return in.getHeader(key);
}

@Override
public boolean hasHeader(String key) {
return in.hasHeader(key);
}

@Override
public Set<String> getHeaders() {
return in.getHeaders();
}

@Override
public void copyHeadersFrom(HasHeaders from) {
in.copyHeadersFrom(from);
}

@Override
public void copyContextAndHeadersFrom(HasContextAndHeaders other) {
in.copyContextAndHeadersFrom(other);
}

@Override
public <SubPhaseContext extends FetchSubPhaseContext> SubPhaseContext getFetchSubPhaseContext(FetchSubPhase.ContextFactory<SubPhaseContext> contextFactory) {
return in.getFetchSubPhaseContext(contextFactory);

@ -21,12 +21,14 @@ package org.elasticsearch.search.internal;
import com.google.common.collect.Iterables;
import com.google.common.collect.Multimap;
import com.google.common.collect.MultimapBuilder;

import org.apache.lucene.search.Collector;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.Sort;
import org.apache.lucene.util.Counter;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
import org.elasticsearch.common.DelegatingHasContextAndHeaders;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseFieldMatcher;
@ -67,7 +69,7 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;

public abstract class SearchContext implements Releasable, HasContextAndHeaders {
public abstract class SearchContext extends DelegatingHasContextAndHeaders implements Releasable {

private static ThreadLocal<SearchContext> current = new ThreadLocal<>();
public final static int DEFAULT_TERMINATE_AFTER = 0;
@ -91,7 +93,8 @@ public abstract class SearchContext implements Releasable, HasContextAndHeaders

protected final ParseFieldMatcher parseFieldMatcher;

protected SearchContext(ParseFieldMatcher parseFieldMatcher) {
protected SearchContext(ParseFieldMatcher parseFieldMatcher, HasContextAndHeaders contextHeaders) {
super(contextHeaders);
this.parseFieldMatcher = parseFieldMatcher;
}

@ -18,13 +18,15 @@
*/
package org.elasticsearch.search.suggest;

import java.io.IOException;

import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IndexQueryParserService;

import java.io.IOException;

public interface SuggestContextParser {
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException;
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException;

}
@ -19,6 +19,7 @@
package org.elasticsearch.search.suggest;

import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
@ -45,11 +46,13 @@ public final class SuggestParseElement implements SearchParseElement {

@Override
public void parse(XContentParser parser, SearchContext context) throws Exception {
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.queryParserService(), context.shardTarget().index(), context.shardTarget().shardId());
SuggestionSearchContext suggestionSearchContext = parseInternal(parser, context.mapperService(), context.queryParserService(),
context.shardTarget().index(), context.shardTarget().shardId(), context);
context.suggest(suggestionSearchContext);
}

public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService, String index, int shardId) throws IOException {
public SuggestionSearchContext parseInternal(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, String index, int shardId, HasContextAndHeaders headersContext) throws IOException {
SuggestionSearchContext suggestionSearchContext = new SuggestionSearchContext();

BytesRef globalText = null;
@ -88,7 +91,7 @@ public final class SuggestParseElement implements SearchParseElement {
throw new IllegalArgumentException("Suggester[" + fieldName + "] not supported");
}
final SuggestContextParser contextParser = suggesters.get(fieldName).getContextParser();
suggestionContext = contextParser.parse(parser, mapperService, queryParserService);
suggestionContext = contextParser.parse(parser, mapperService, queryParserService, headersContext);
}
}
if (suggestionContext != null) {

@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.suggest.completion;

import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.unit.Fuzziness;
@ -49,13 +50,14 @@ public class CompletionSuggestParser implements SuggestContextParser {
}

@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
XContentParser.Token token;
String fieldName = null;
CompletionSuggestionContext suggestion = new CompletionSuggestionContext(completionSuggester);

XContentParser contextParser = null;

while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
@ -90,7 +92,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
// Copy the current structure. We will parse, once the mapping is provided
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.copyCurrentStructure(parser);
BytesReference bytes = builder.bytes();
BytesReference bytes = builder.bytes();
contextParser = parser.contentType().xContent().createParser(bytes);
} else {
throw new IllegalArgumentException("suggester [completion] doesn't support field [" + fieldName + "]");
@ -99,7 +101,7 @@ public class CompletionSuggestParser implements SuggestContextParser {
throw new IllegalArgumentException("suggester[completion] doesn't support field [" + fieldName + "]");
}
}

suggestion.fieldType((CompletionFieldMapper.CompletionFieldType) mapperService.smartNameFieldType(suggestion.getField()));

CompletionFieldMapper.CompletionFieldType fieldType = suggestion.fieldType();

@ -22,6 +22,7 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Terms;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
@ -48,12 +49,13 @@ public final class PhraseSuggestParser implements SuggestContextParser {
}

@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
PhraseSuggestionContext suggestion = new PhraseSuggestionContext(suggester);
suggestion.setQueryParserService(queryParserService);
XContentParser.Token token;
String fieldName = null;
boolean gramSizeSet = false;
boolean gramSizeSet = false;
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {
fieldName = parser.currentName();
@ -140,7 +142,8 @@ public final class PhraseSuggestParser implements SuggestContextParser {
throw new IllegalArgumentException("suggester[phrase][collate] query already set, doesn't support additional [" + fieldName + "]");
}
Template template = Template.parse(parser, queryParserService.parseFieldMatcher());
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH);
CompiledScript compiledScript = suggester.scriptService().compile(template, ScriptContext.Standard.SEARCH,
headersContext);
suggestion.setCollateQueryScript(compiledScript);
} else if ("params".equals(fieldName)) {
suggestion.setCollateScriptParams(parser.map());
@ -162,7 +165,7 @@ public final class PhraseSuggestParser implements SuggestContextParser {
throw new IllegalArgumentException("suggester[phrase] doesn't support field [" + fieldName + "]");
}
}

if (suggestion.getField() == null) {
throw new IllegalArgumentException("The required field option is missing");
}
@ -178,11 +181,11 @@ public final class PhraseSuggestParser implements SuggestContextParser {
suggestion.setAnalyzer(fieldType.searchAnalyzer());
}
}

if (suggestion.model() == null) {
suggestion.setModel(StupidBackoffScorer.FACTORY);
}

if (!gramSizeSet || suggestion.generators().isEmpty()) {
final ShingleTokenFilterFactory.Factory shingleFilterFactory = SuggestUtils.getShingleFilterFactory(suggestion.getAnalyzer());
if (!gramSizeSet) {
@ -204,9 +207,9 @@ public final class PhraseSuggestParser implements SuggestContextParser {
suggestion.addGenerator(generator);
}
}

return suggestion;
}

@ -20,11 +20,16 @@ package org.elasticsearch.search.suggest.phrase;

import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.script.Template;
import org.elasticsearch.search.suggest.SuggestBuilder.SuggestionBuilder;

import java.io.IOException;
import java.util.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;

/**
* Defines the actual suggest command for phrase suggestions ( <tt>phrase</tt>).
@ -41,7 +46,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
private Integer tokenLimit;
private String preTag;
private String postTag;
private String collateQuery;
private Template collateQuery;
private Map<String, Object> collateParams;
private Boolean collatePrune;

@ -67,7 +72,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
* misspellings in order to form a correction. This method accepts a float
* value in the range [0..1) as a fraction of the actual query terms a
* number <tt>>=1</tt> as an absolut number of query terms.
*
*
* The default is set to <tt>1.0</tt> which corresponds to that only
* corrections with at most 1 missspelled term are returned.
*/
@ -131,13 +136,13 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
this.generators.clear();
return this;
}

/**
* If set to <code>true</code> the phrase suggester will fail if the analyzer only
* produces ngrams. the default it <code>true</code>.
*/
public PhraseSuggestionBuilder forceUnigrams(boolean forceUnigrams) {
this.forceUnigrams = forceUnigrams;
this.forceUnigrams = forceUnigrams;
return this;
}

@ -149,7 +154,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
this.model = model;
return this;
}

public PhraseSuggestionBuilder tokenLimit(int tokenLimit) {
this.tokenLimit = tokenLimit;
return this;
@ -172,7 +177,15 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
* Sets a query used for filtering out suggested phrases (collation).
*/
public PhraseSuggestionBuilder collateQuery(String collateQuery) {
this.collateQuery = collateQuery;
this.collateQuery = new Template(collateQuery);
return this;
}

/**
* Sets a query used for filtering out suggested phrases (collation).
*/
public PhraseSuggestionBuilder collateQuery(Template collateQueryTemplate) {
this.collateQuery = collateQueryTemplate;
return this;
}

@ -252,7 +265,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge

/**
* Creates a new {@link DirectCandidateGenerator}
*
*
* @param field
* the field this candidate generator operates on.
*/
@ -275,7 +288,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge

/**
* Creates a Stupid-Backoff smoothing model.
*
*
* @param discount
* the discount given to lower order ngrams if the higher order ngram doesn't exits
*/
@ -293,7 +306,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge

/**
* An <a href="http://en.wikipedia.org/wiki/Additive_smoothing">additive
* smoothing</a> model.
* smoothing</a> model.
* <p>
* See <a
* href="http://en.wikipedia.org/wiki/N-gram#Smoothing_techniques">N-Gram
@ -304,7 +317,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
private final double alpha;
/**
* Creates a Laplace smoothing model.
*
*
* @param discount
* the discount given to lower order ngrams if the higher order ngram doesn't exits
*/
@ -319,8 +332,8 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
return builder;
}
}

public static abstract class SmoothingModel implements ToXContent {
private final String type;

@ -335,7 +348,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
builder.endObject();
return builder;
}

protected abstract XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException;
}

@ -354,9 +367,9 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge

/**
* Creates a linear interpolation smoothing model.
*
*
* Note: the lambdas must sum up to one.
*
*
* @param trigramLambda
* the trigram lambda
* @param bigramLambda
@ -381,7 +394,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
}

/**
* {@link CandidateGenerator} base class.
* {@link CandidateGenerator} base class.
*/
public static abstract class CandidateGenerator implements ToXContent {
private final String type;
@ -397,7 +410,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
}

/**
*
*
*
*/
public static final class DirectCandidateGenerator extends CandidateGenerator {
@ -595,7 +608,7 @@ public final class PhraseSuggestionBuilder extends SuggestionBuilder<PhraseSugge
this.postFilter = postFilter;
return this;
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();

@ -18,6 +18,7 @@
*/
package org.elasticsearch.search.suggest.term;

import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.MapperService;
@ -38,7 +39,8 @@ public final class TermSuggestParser implements SuggestContextParser {
}

@Override
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
XContentParser.Token token;
String fieldName = null;
TermSuggestionContext suggestion = new TermSuggestionContext(suggester);

@ -21,7 +21,6 @@ package org.elasticsearch.index.query;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionModule;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.inject.AbstractModule;

@ -20,6 +20,7 @@
package org.elasticsearch.script;

import com.google.common.collect.ImmutableSet;
import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.expression.ExpressionScriptEngineService;
@ -54,12 +55,14 @@ public class CustomScriptContextIT extends ESIntegTestCase {

@Test
public void testCustomScriptContextsSettings() {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();

ScriptService scriptService = internalCluster().getInstance(ScriptService.class);
for (String lang : LANG_SET) {
for (ScriptService.ScriptType scriptType : ScriptService.ScriptType.values()) {
try {
scriptService.compile(new Script("test", scriptType, lang, null), new ScriptContext.Plugin(PLUGIN_NAME,
"custom_globally_disabled_op"));
"custom_globally_disabled_op"), contextAndHeaders);
fail("script compilation should have been rejected");
} catch(ScriptException e) {
assertThat(e.getMessage(), containsString("scripts of type [" + scriptType + "], operation [" + PLUGIN_NAME + "_custom_globally_disabled_op] and lang [" + lang + "] are disabled"));
@ -69,34 +72,35 @@ public class CustomScriptContextIT extends ESIntegTestCase {

try {
scriptService.compile(new Script("1", ScriptService.ScriptType.INLINE, "expression", null), new ScriptContext.Plugin(
PLUGIN_NAME, "custom_exp_disabled_op"));
PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders);
fail("script compilation should have been rejected");
} catch(ScriptException e) {
assertThat(e.getMessage(), containsString("scripts of type [inline], operation [" + PLUGIN_NAME + "_custom_exp_disabled_op] and lang [expression] are disabled"));
}

CompiledScript compiledScript = scriptService.compile(new Script("1", ScriptService.ScriptType.INLINE, "expression", null),
randomFrom(new ScriptContext[] {ScriptContext.Standard.AGGS, ScriptContext.Standard.SEARCH}));
randomFrom(new ScriptContext[] { ScriptContext.Standard.AGGS, ScriptContext.Standard.SEARCH }), contextAndHeaders);
assertThat(compiledScript, notNullValue());

compiledScript = scriptService.compile(new Script("1", ScriptService.ScriptType.INLINE, "mustache", null),
new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"));
new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"), contextAndHeaders);
assertThat(compiledScript, notNullValue());

for (String lang : LANG_SET) {
compiledScript = scriptService.compile(new Script("1", ScriptService.ScriptType.INLINE, lang, null), new ScriptContext.Plugin(
PLUGIN_NAME, "custom_op"));
PLUGIN_NAME, "custom_op"), contextAndHeaders);
assertThat(compiledScript, notNullValue());
}
}

@Test
public void testCompileNonRegisteredPluginContext() {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
ScriptService scriptService = internalCluster().getInstance(ScriptService.class);
try {
scriptService.compile(
new Script("test", randomFrom(ScriptService.ScriptType.values()), randomFrom(LANG_SET.toArray(new String[LANG_SET
.size()])), null), new ScriptContext.Plugin("test", "unknown"));
.size()])), null), new ScriptContext.Plugin("test", "unknown"), contextAndHeaders);
fail("script compilation should have been rejected");
} catch(IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("script context [test_unknown] not supported"));
@ -105,6 +109,7 @@ public class CustomScriptContextIT extends ESIntegTestCase {

@Test
public void testCompileNonRegisteredScriptContext() {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
ScriptService scriptService = internalCluster().getInstance(ScriptService.class);
try {
scriptService.compile(
@ -114,7 +119,7 @@ public class CustomScriptContextIT extends ESIntegTestCase {
public String getKey() {
return "test";
}
});
}, contextAndHeaders);
fail("script compilation should have been rejected");
} catch(IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("script context [test] not supported"));
@ -133,9 +138,9 @@ public class CustomScriptContextIT extends ESIntegTestCase {
}

public void onModule(ScriptModule scriptModule) {
scriptModule.registerScriptContext(new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"));
scriptModule.registerScriptContext(new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"));
scriptModule.registerScriptContext(new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"));
scriptModule.registerScriptContext(new ScriptContext.Plugin(PLUGIN_NAME, "custom_op"));
scriptModule.registerScriptContext(new ScriptContext.Plugin(PLUGIN_NAME, "custom_exp_disabled_op"));
scriptModule.registerScriptContext(new ScriptContext.Plugin(PLUGIN_NAME, "custom_globally_disabled_op"));
}
}
}
}

@ -20,6 +20,7 @@
package org.elasticsearch.script;

import com.google.common.collect.ImmutableSet;
import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.inject.Injector;
import org.elasticsearch.common.inject.ModulesBuilder;
@ -47,6 +48,7 @@ public class NativeScriptTests extends ESTestCase {

@Test
public void testNativeScript() throws InterruptedException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings settings = Settings.settingsBuilder()
.put("name", "testNativeScript")
.put("path.home", createTempDir())
@ -62,13 +64,14 @@ public class NativeScriptTests extends ESTestCase {
ScriptService scriptService = injector.getInstance(ScriptService.class);

ExecutableScript executable = scriptService.executable(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null),
ScriptContext.Standard.SEARCH);
ScriptContext.Standard.SEARCH, contextAndHeaders);
assertThat(executable.run().toString(), equalTo("test"));
terminate(injector.getInstance(ThreadPool.class));
}

@Test
public void testFineGrainedSettingsDontAffectNativeScripts() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.settingsBuilder();
if (randomBoolean()) {
ScriptType scriptType = randomFrom(ScriptType.values());
@ -87,8 +90,8 @@ public class NativeScriptTests extends ESTestCase {
ScriptService scriptService = new ScriptService(settings, environment, scriptEngineServices, resourceWatcherService, scriptContextRegistry);

for (ScriptContext scriptContext : scriptContextRegistry.scriptContexts()) {
assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext),
notNullValue());
assertThat(scriptService.compile(new Script("my", ScriptType.INLINE, NativeScriptEngineService.NAME, null), scriptContext,
contextAndHeaders), notNullValue());
}
}

@ -21,6 +21,8 @@ package org.elasticsearch.script;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;

import org.elasticsearch.common.ContextAndHeaderHolder;
import org.elasticsearch.common.HasContextAndHeaders;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
@ -106,7 +108,7 @@ public class ScriptServiceTests extends ESTestCase {
Environment environment = new Environment(finalSettings);
scriptService = new ScriptService(finalSettings, environment, scriptEngineServices, resourceWatcherService, scriptContextRegistry) {
@Override
String getScriptFromIndex(String scriptLang, String id) {
String getScriptFromIndex(String scriptLang, String id, HasContextAndHeaders headersContext) {
//mock the script that gets retrieved from an index
return "100";
}
@ -125,6 +127,8 @@ public class ScriptServiceTests extends ESTestCase {

@Test
public void testScriptsWithoutExtensions() throws IOException {

ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
logger.info("--> setup two test files one with extension and another without");
Path testFileNoExt = scriptsFilePath.resolve("test_no_ext");
@ -135,7 +139,7 @@ public class ScriptServiceTests extends ESTestCase {

logger.info("--> verify that file with extension was correctly processed");
CompiledScript compiledScript = scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null),
ScriptContext.Standard.SEARCH);
ScriptContext.Standard.SEARCH, contextAndHeaders);
assertThat(compiledScript.compiled(), equalTo((Object) "compiled_test_file"));

logger.info("--> delete both files");
@ -145,7 +149,8 @@ public class ScriptServiceTests extends ESTestCase {

logger.info("--> verify that file with extension was correctly removed");
try {
scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH);
scriptService.compile(new Script("test_script", ScriptType.FILE, "test", null), ScriptContext.Standard.SEARCH,
contextAndHeaders);
fail("the script test_script should no longer exist");
} catch (IllegalArgumentException ex) {
assertThat(ex.getMessage(), containsString("Unable to find on disk file script [test_script] using lang [test]"));
@ -154,49 +159,56 @@ public class ScriptServiceTests extends ESTestCase {

@Test
public void testScriptsSameNameDifferentLanguage() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
createFileScripts("groovy", "expression");
CompiledScript groovyScript = scriptService.compile(
new Script("file_script", ScriptType.FILE, GroovyScriptEngineService.NAME, null), randomFrom(scriptContexts));
new Script("file_script", ScriptType.FILE, GroovyScriptEngineService.NAME, null), randomFrom(scriptContexts),
contextAndHeaders);
assertThat(groovyScript.lang(), equalTo(GroovyScriptEngineService.NAME));
CompiledScript expressionScript = scriptService.compile(new Script("file_script", ScriptType.FILE, ExpressionScriptEngineService.NAME,
null), randomFrom(new ScriptContext[] {ScriptContext.Standard.AGGS, ScriptContext.Standard.SEARCH}));
null), randomFrom(new ScriptContext[] { ScriptContext.Standard.AGGS,
ScriptContext.Standard.SEARCH }), contextAndHeaders);
assertThat(expressionScript.lang(), equalTo(ExpressionScriptEngineService.NAME));
}

@Test
public void testInlineScriptCompiledOnceCache() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
CompiledScript compiledScript1 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null),
randomFrom(scriptContexts));
randomFrom(scriptContexts), contextAndHeaders);
CompiledScript compiledScript2 = scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null),
randomFrom(scriptContexts));
randomFrom(scriptContexts), contextAndHeaders);
assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
}

@Test
public void testInlineScriptCompiledOnceMultipleLangAcronyms() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
CompiledScript compiledScript1 = scriptService.compile(new Script("script", ScriptType.INLINE, "test", null),
randomFrom(scriptContexts));
randomFrom(scriptContexts), contextAndHeaders);
CompiledScript compiledScript2 = scriptService.compile(new Script("script", ScriptType.INLINE, "test2", null),
randomFrom(scriptContexts));
randomFrom(scriptContexts), contextAndHeaders);
assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
}

@Test
public void testFileScriptCompiledOnceMultipleLangAcronyms() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
createFileScripts("test");
CompiledScript compiledScript1 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null),
randomFrom(scriptContexts));
randomFrom(scriptContexts), contextAndHeaders);
CompiledScript compiledScript2 = scriptService.compile(new Script("file_script", ScriptType.FILE, "test2", null),
randomFrom(scriptContexts));
randomFrom(scriptContexts), contextAndHeaders);
assertThat(compiledScript1.compiled(), sameInstance(compiledScript2.compiled()));
}

@Test
public void testDefaultBehaviourFineGrainedSettings() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.builder();
//rarely inject the default settings, which have no effect
if (rarely()) {
@ -213,29 +225,30 @@ public class ScriptServiceTests extends ESTestCase {

for (ScriptContext scriptContext : scriptContexts) {
//groovy is not sandboxed, only file scripts are enabled by default
assertCompileRejected(GroovyScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext);
assertCompileRejected(GroovyScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext);
assertCompileAccepted(GroovyScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext);
assertCompileRejected(GroovyScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext, contextAndHeaders);
assertCompileRejected(GroovyScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext, contextAndHeaders);
assertCompileAccepted(GroovyScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext, contextAndHeaders);
//expression engine is sandboxed, all scripts are enabled by default
if (!scriptContext.getKey().equals(ScriptContext.Standard.MAPPING.getKey()) &&
!scriptContext.getKey().equals(ScriptContext.Standard.UPDATE.getKey())) {
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext, contextAndHeaders);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext, contextAndHeaders);
assertCompileAccepted(ExpressionScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext, contextAndHeaders);
}
//mustache engine is sandboxed, all scripts are enabled by default
assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext);
assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext);
assertCompileAccepted(MustacheScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext);
assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INLINE, scriptContext, contextAndHeaders);
assertCompileAccepted(MustacheScriptEngineService.NAME, "script", ScriptType.INDEXED, scriptContext, contextAndHeaders);
assertCompileAccepted(MustacheScriptEngineService.NAME, "file_script", ScriptType.FILE, scriptContext, contextAndHeaders);
//custom engine is sandboxed, all scripts are enabled by default
assertCompileAccepted("test", "script", ScriptType.INLINE, scriptContext);
assertCompileAccepted("test", "script", ScriptType.INDEXED, scriptContext);
assertCompileAccepted("test", "file_script", ScriptType.FILE, scriptContext);
assertCompileAccepted("test", "script", ScriptType.INLINE, scriptContext, contextAndHeaders);
assertCompileAccepted("test", "script", ScriptType.INDEXED, scriptContext, contextAndHeaders);
assertCompileAccepted("test", "file_script", ScriptType.FILE, scriptContext, contextAndHeaders);
}
}

@Test
public void testFineGrainedSettings() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
//collect the fine-grained settings to set for this run
int numScriptSettings = randomIntBetween(0, ScriptType.values().length);
Map<ScriptType, ScriptMode> scriptSourceSettings = new HashMap<>();
@ -345,16 +358,16 @@ public class ScriptServiceTests extends ESTestCase {
for (String lang : scriptEngineService.types()) {
switch (scriptMode) {
case ON:
assertCompileAccepted(lang, script, scriptType, scriptContext);
assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders);
break;
case OFF:
assertCompileRejected(lang, script, scriptType, scriptContext);
assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders);
break;
case SANDBOX:
if (scriptEngineService.sandboxed()) {
assertCompileAccepted(lang, script, scriptType, scriptContext);
assertCompileAccepted(lang, script, scriptType, scriptContext, contextAndHeaders);
} else {
assertCompileRejected(lang, script, scriptType, scriptContext);
assertCompileRejected(lang, script, scriptType, scriptContext, contextAndHeaders);
}
break;
}
@ -366,6 +379,7 @@ public class ScriptServiceTests extends ESTestCase {

@Test
public void testCompileNonRegisteredContext() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
String pluginName;
String unknownContext;
@ -378,7 +392,7 @@ public class ScriptServiceTests extends ESTestCase {
for (String type : scriptEngineService.types()) {
try {
scriptService.compile(new Script("test", randomFrom(ScriptType.values()), type, null), new ScriptContext.Plugin(
pluginName, unknownContext));
pluginName, unknownContext), contextAndHeaders);
fail("script compilation should have been rejected");
} catch(IllegalArgumentException e) {
assertThat(e.getMessage(), containsString("script context [" + pluginName + "_" + unknownContext + "] not supported"));
@ -389,15 +403,17 @@ public class ScriptServiceTests extends ESTestCase {

@Test
public void testCompileCountedInCompilationStats() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
scriptService.compile(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
assertEquals(1L, scriptService.stats().getCompilations());
}

@Test
public void testExecutableCountedInCompilationStats() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
assertEquals(1L, scriptService.stats().getCompilations());
}

@ -410,46 +426,52 @@ public class ScriptServiceTests extends ESTestCase {

@Test
public void testMultipleCompilationsCountedInCompilationStats() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
int numberOfCompilations = randomIntBetween(1, 1024);
for (int i = 0; i < numberOfCompilations; i++) {
scriptService.compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
scriptService
.compile(new Script(i + " + " + i, ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
}
assertEquals(numberOfCompilations, scriptService.stats().getCompilations());
}

@Test
public void testCompilationStatsOnCacheHit() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.builder();
builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1);
buildScriptService(builder.build());
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
assertEquals(1L, scriptService.stats().getCompilations());
}

@Test
public void testFileScriptCountedInCompilationStats() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
createFileScripts("test");
scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts));
scriptService.compile(new Script("file_script", ScriptType.FILE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
assertEquals(1L, scriptService.stats().getCompilations());
}

@Test
public void testIndexedScriptCountedInCompilationStats() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
buildScriptService(Settings.EMPTY);
scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts));
scriptService.compile(new Script("script", ScriptType.INDEXED, "test", null), randomFrom(scriptContexts), contextAndHeaders);
assertEquals(1L, scriptService.stats().getCompilations());
}

@Test
public void testCacheEvictionCountedInCacheEvictionsStats() throws IOException {
ContextAndHeaderHolder contextAndHeaders = new ContextAndHeaderHolder();
Settings.Builder builder = Settings.builder();
builder.put(ScriptService.SCRIPT_CACHE_SIZE_SETTING, 1);
buildScriptService(builder.build());
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts));
scriptService.executable(new Script("1+1", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
scriptService.executable(new Script("2+2", ScriptType.INLINE, "test", null), randomFrom(scriptContexts), contextAndHeaders);
assertEquals(2L, scriptService.stats().getCompilations());
assertEquals(1L, scriptService.stats().getCacheEvictions());
}
@ -462,17 +484,19 @@ public class ScriptServiceTests extends ESTestCase {
resourceWatcherService.notifyNow();
}

private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) {
private void assertCompileRejected(String lang, String script, ScriptType scriptType, ScriptContext scriptContext,
HasContextAndHeaders contextAndHeaders) {
try {
scriptService.compile(new Script(script, scriptType, lang, null), scriptContext);
scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders);
fail("compile should have been rejected for lang [" + lang + "], script_type [" + scriptType + "], scripted_op [" + scriptContext + "]");
} catch(ScriptException e) {
//all good
}
}

private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext) {
assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext), notNullValue());
private void assertCompileAccepted(String lang, String script, ScriptType scriptType, ScriptContext scriptContext,
HasContextAndHeaders contextAndHeaders) {
assertThat(scriptService.compile(new Script(script, scriptType, lang, null), scriptContext, contextAndHeaders), notNullValue());
}

public static class TestEngineService implements ScriptEngineService {

@ -56,6 +56,7 @@ import org.elasticsearch.search.aggregations.bucket.significant.heuristics.Signi
import org.elasticsearch.search.aggregations.bucket.terms.StringTerms;
import org.elasticsearch.search.aggregations.bucket.terms.Terms;
import org.elasticsearch.search.aggregations.bucket.terms.TermsBuilder;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.Test;

@ -235,7 +236,8 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
public static class SimpleHeuristicParser implements SignificanceHeuristicParser {

@Override
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher) throws IOException, QueryParsingException {
public SignificanceHeuristic parse(XContentParser parser, ParseFieldMatcher parseFieldMatcher, SearchContext context)
throws IOException, QueryParsingException {
parser.nextToken();
return new SimpleHeuristic();
}
@ -291,7 +293,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
assertThat(responseBuilder.string(), equalTo(result));

}

@Test
public void testDeletesIssue7951() throws Exception {
String settings = "{\"index.number_of_shards\": 1, \"index.number_of_replicas\": 0}";
@ -311,10 +313,10 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "4")
.setSource(TEXT_FIELD, cat2v2, CLASS_FIELD, "2"));
indexRandom(true, false, indexRequestBuilderList);

// Now create some holes in the index with selective deletes caused by updates.
// This is the scenario that caused this issue https://github.com/elasticsearch/elasticsearch/issues/7951
// Scoring algorithms throw exceptions if term docFreqs exceed the reported size of the index
// Scoring algorithms throw exceptions if term docFreqs exceed the reported size of the index
// from which they are taken so need to make sure this doesn't happen.
String[] text = cat1v1;
indexRequestBuilderList.clear();
@ -323,7 +325,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
indexRequestBuilderList.add(client().prepareIndex(INDEX_NAME, DOC_TYPE, "1").setSource(TEXT_FIELD, text, CLASS_FIELD, "1"));
}
indexRandom(true, false, indexRequestBuilderList);

SearchResponse response1 = client().prepareSearch(INDEX_NAME).setTypes(DOC_TYPE)
.addAggregation(new TermsBuilder("class")
.field(CLASS_FIELD)
@ -333,7 +335,7 @@ public class SignificantTermsSignificanceScoreIT extends ESIntegTestCase {
.minDocCount(1)))
.execute()
.actionGet();
}
}

@Test
public void testBackgroundVsSeparateSet() throws Exception {

@ -20,6 +20,7 @@ package org.elasticsearch.search.suggest;
|
||||
|
||||
import org.apache.lucene.search.IndexSearcher;
|
||||
import org.apache.lucene.util.CharsRefBuilder;
|
||||
import org.elasticsearch.common.HasContextAndHeaders;
|
||||
import org.elasticsearch.common.text.StringText;
|
||||
import org.elasticsearch.common.xcontent.XContentParser;
|
||||
import org.elasticsearch.index.mapper.MapperService;
|
||||
@ -59,7 +60,8 @@ public class CustomSuggester extends Suggester<CustomSuggester.CustomSuggestions
|
||||
public SuggestContextParser getContextParser() {
|
||||
return new SuggestContextParser() {
|
||||
@Override
|
||||
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService, IndexQueryParserService queryParserService) throws IOException {
|
||||
public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
|
||||
IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
|
||||
Map<String, Object> options = parser.map();
|
||||
CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
|
||||
suggestionContext.setField((String) options.get("field"));
|
||||
|
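Likewise, a suggest context parser now sees the caller's headers through the new headersContext argument. An illustrative variant follows; the "_debug_suggest" header and the tail of the method are sketched, not the actual body, which the diff truncates:

    public SuggestionSearchContext.SuggestionContext parse(XContentParser parser, MapperService mapperService,
            IndexQueryParserService queryParserService, HasContextAndHeaders headersContext) throws IOException {
        Map<String, Object> options = parser.map();
        CustomSuggestionsContext suggestionContext = new CustomSuggestionsContext(CustomSuggester.this, options);
        suggestionContext.setField((String) options.get("field"));
        if (headersContext.hasHeader("_debug_suggest")) { // headers propagated from the original request
            // a suggester could adjust its behaviour per caller here
        }
        return suggestionContext;
    }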
@ -94,7 +94,7 @@ public class TestSearchContext extends SearchContext {
private final Map<String, FetchSubPhaseContext> subPhaseContexts = new HashMap<>();

public TestSearchContext(ThreadPool threadPool, PageCacheRecycler pageCacheRecycler, BigArrays bigArrays, IndexService indexService) {
    super(ParseFieldMatcher.STRICT);
    super(ParseFieldMatcher.STRICT, null);
    this.pageCacheRecycler = pageCacheRecycler;
    this.bigArrays = bigArrays.withCircuitBreaking();
    this.indexService = indexService;
@ -105,7 +105,7 @@ public class TestSearchContext extends SearchContext {
}

public TestSearchContext() {
    super(ParseFieldMatcher.STRICT);
    super(ParseFieldMatcher.STRICT, null);
    this.pageCacheRecycler = null;
    this.bigArrays = null;
    this.indexService = null;

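The null passed to super here stands in for the HasContextAndHeaders source that SearchContext now requires. A hypothetical constructor variant forwarding a real carrier instead, with ContextAndHeaderHolder assumed as the simple implementation:

    public TestSearchContext(HasContextAndHeaders contextHeaders) {
        super(ParseFieldMatcher.STRICT, contextHeaders); // request context/headers now reachable from the test context
        this.pageCacheRecycler = null;
        this.bigArrays = null;
        this.indexService = null;
    }
    // usage sketch: new TestSearchContext(new ContextAndHeaderHolder());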
@ -27,13 +27,16 @@ import org.elasticsearch.action.ActionModule;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptRequest;
import org.elasticsearch.action.indexedscripts.put.PutIndexedScriptResponse;
import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
@ -44,6 +47,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.http.HttpServerTransport;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.GeoShapeQueryBuilder;
@ -52,9 +56,16 @@ import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.TermsLookupQueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.script.Template;
import org.elasticsearch.script.groovy.GroovyScriptEngineService;
import org.elasticsearch.script.mustache.MustacheScriptEngineService;
import org.elasticsearch.search.aggregations.AggregationBuilders;
import org.elasticsearch.search.aggregations.pipeline.PipelineAggregatorBuilders;
import org.elasticsearch.search.suggest.Suggest;
import org.elasticsearch.search.suggest.phrase.PhraseSuggestionBuilder;
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.ESIntegTestCase.ClusterScope;
import org.elasticsearch.test.rest.client.http.HttpRequestBuilder;
@ -64,6 +75,7 @@ import org.junit.Before;
import org.junit.Test;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@ -77,11 +89,14 @@ import static org.elasticsearch.common.settings.Settings.settingsBuilder;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.node.Node.HTTP_ENABLED;
import static org.elasticsearch.rest.RestStatus.OK;
import static org.elasticsearch.search.suggest.SuggestBuilders.phraseSuggestion;
import static org.elasticsearch.test.ESIntegTestCase.Scope.SUITE;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSuggestionSize;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.hasStatus;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
@ -273,6 +288,59 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase {
    assertRequestsContainHeader(PutIndexedScriptRequest.class);
}

@Test
public void testThatIndexedScriptGetRequestInTemplateQueryContainsContextAndHeaders() throws Exception {
    PutIndexedScriptResponse scriptResponse = transportClient()
            .preparePutIndexedScript(
                    MustacheScriptEngineService.NAME,
                    "my_script",
                    jsonBuilder().startObject().field("script", "{ \"query\": { \"match\": { \"name\": \"Star Wars\" }}}").endObject()
                            .string()).get();
    assertThat(scriptResponse.isCreated(), is(true));

    transportClient().prepareIndex(queryIndex, "type", "1")
            .setSource(jsonBuilder().startObject().field("name", "Star Wars - The new republic").endObject()).get();
    transportClient().admin().indices().prepareRefresh(queryIndex).get();

    SearchResponse searchResponse = transportClient()
            .prepareSearch(queryIndex)
            .setQuery(
                    QueryBuilders.templateQuery(new Template("my_script", ScriptType.INDEXED,
                            MustacheScriptEngineService.NAME, null, null))).get();
    assertNoFailures(searchResponse);
    assertHitCount(searchResponse, 1);

    assertGetRequestsContainHeaders(".scripts");
    assertRequestsContainHeader(PutIndexedScriptRequest.class);
}
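For orientation: the assertions above rely on a header being attached to every outgoing client request in the test setup. A hedged sketch of that pattern, where randomHeaderKey and randomHeaderValue are assumed to be fields initialized in a @Before method and putHeader on the request builder is assumed as the attachment point:

    SearchRequestBuilder search = transportClient().prepareSearch(queryIndex)
            .setQuery(QueryBuilders.templateQuery(new Template("my_script", ScriptType.INDEXED,
                    MustacheScriptEngineService.NAME, null, null)));
    search.putHeader(randomHeaderKey, randomHeaderValue); // must reappear on the internal .scripts GetRequest
    SearchResponse response = search.get();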

@Test
public void testThatIndexedScriptGetRequestInReducePhaseContainsContextAndHeaders() throws Exception {
    PutIndexedScriptResponse scriptResponse = transportClient().preparePutIndexedScript(GroovyScriptEngineService.NAME, "my_script",
            jsonBuilder().startObject().field("script", "_value0 * 10").endObject().string()).get();
    assertThat(scriptResponse.isCreated(), is(true));

    transportClient().prepareIndex(queryIndex, "type", "1")
            .setSource(jsonBuilder().startObject().field("s_field", "foo").field("l_field", 10).endObject()).get();
    transportClient().admin().indices().prepareRefresh(queryIndex).get();

    SearchResponse searchResponse = transportClient()
            .prepareSearch(queryIndex)
            .addAggregation(
                    AggregationBuilders
                            .terms("terms")
                            .field("s_field")
                            .subAggregation(AggregationBuilders.max("max").field("l_field"))
                            .subAggregation(
                                    PipelineAggregatorBuilders.bucketScript("scripted").setBucketsPaths("max").script(
                                            new Script("my_script", ScriptType.INDEXED, GroovyScriptEngineService.NAME, null)))).get();
    assertNoFailures(searchResponse);
    assertHitCount(searchResponse, 1);

    assertGetRequestsContainHeaders(".scripts");
    assertRequestsContainHeader(PutIndexedScriptRequest.class);
}
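Behind this assertion sits the ScriptService signature shown earlier: when the bucket-script aggregation resolves my_script during reduce, the original request rides along as the HasContextAndHeaders argument. A sketch of the service-side shape of that call; the searchRequest variable is hypothetical, standing in for the request whose context and headers are propagated:

    assertThat(scriptService.compile(
            new Script("my_script", ScriptType.INDEXED, GroovyScriptEngineService.NAME, null),
            ScriptContext.Standard.AGGS, searchRequest), notNullValue());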

@Test
public void testThatSearchTemplatesWithIndexedTemplatesGetRequestContainsContextAndHeaders() throws Exception {
    PutIndexedScriptResponse scriptResponse = transportClient().preparePutIndexedScript(MustacheScriptEngineService.NAME, "the_template",
@ -302,6 +370,98 @@ public class ContextAndHeaderTransportIT extends ESIntegTestCase {
    assertRequestsContainHeader(PutIndexedScriptRequest.class);
}

@Test
public void testThatIndexedScriptGetRequestInPhraseSuggestContainsContextAndHeaders() throws Exception {
    CreateIndexRequestBuilder builder = transportClient().admin().indices().prepareCreate("test").setSettings(settingsBuilder()
            .put(indexSettings())
            .put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
            .put("index.analysis.analyzer.text.tokenizer", "standard")
            .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
            .put("index.analysis.filter.my_shingle.type", "shingle")
            .put("index.analysis.filter.my_shingle.output_unigrams", true)
            .put("index.analysis.filter.my_shingle.min_shingle_size", 2)
            .put("index.analysis.filter.my_shingle.max_shingle_size", 3));

    XContentBuilder mapping = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("type1")
            .startObject("properties")
            .startObject("title")
            .field("type", "string")
            .field("analyzer", "text")
            .endObject()
            .endObject()
            .endObject()
            .endObject();
    assertAcked(builder.addMapping("type1", mapping));
    ensureGreen();

    List<String> titles = new ArrayList<>();

    titles.add("United States House of Representatives Elections in Washington 2006");
    titles.add("United States House of Representatives Elections in Washington 2005");
    titles.add("State");
    titles.add("Houses of Parliament");
    titles.add("Representative Government");
    titles.add("Election");

    List<IndexRequestBuilder> builders = new ArrayList<>();
    for (String title : titles) {
        transportClient().prepareIndex("test", "type1").setSource("title", title).get();
    }
    transportClient().admin().indices().prepareRefresh("test").get();

    String filterStringAsFilter = XContentFactory.jsonBuilder()
            .startObject()
            .startObject("query")
            .startObject("match_phrase")
            .field("title", "{{suggestion}}")
            .endObject()
            .endObject()
            .endObject()
            .string();

    PutIndexedScriptResponse scriptResponse = transportClient()
            .preparePutIndexedScript(
                    MustacheScriptEngineService.NAME,
                    "my_script",
                    jsonBuilder().startObject().field("script", filterStringAsFilter).endObject()
                            .string()).get();
    assertThat(scriptResponse.isCreated(), is(true));

    PhraseSuggestionBuilder suggest = phraseSuggestion("title")
            .field("title")
            .addCandidateGenerator(PhraseSuggestionBuilder.candidateGenerator("title")
                    .suggestMode("always")
                    .maxTermFreq(.99f)
                    .size(10)
                    .maxInspections(200)
            )
            .confidence(0f)
            .maxErrors(2f)
            .shardSize(30000)
            .size(10);

    PhraseSuggestionBuilder filteredFilterSuggest = suggest.collateQuery(new Template("my_script", ScriptType.INDEXED,
            MustacheScriptEngineService.NAME, null, null));

    SearchRequestBuilder searchRequestBuilder = transportClient().prepareSearch("test").setSize(0);
    String suggestText = "united states house of representatives elections in washington 2006";
    if (suggestText != null) {
        searchRequestBuilder.setSuggestText(suggestText);
    }
    searchRequestBuilder.addSuggestion(filteredFilterSuggest);
    SearchResponse actionGet = searchRequestBuilder.execute().actionGet();
    assertThat(Arrays.toString(actionGet.getShardFailures()), actionGet.getFailedShards(), equalTo(0));
    Suggest searchSuggest = actionGet.getSuggest();

    assertSuggestionSize(searchSuggest, 0, 2, "title");

    assertGetRequestsContainHeaders(".scripts");
    assertRequestsContainHeader(PutIndexedScriptRequest.class);
}
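For contrast, a collate query can also be supplied as an inline template, in which case no .scripts lookup (and hence no propagated GetRequest) is involved. A brief sketch reusing the JSON string built above:

    PhraseSuggestionBuilder inlineCollate = phraseSuggestion("title")
            .field("title")
            .collateQuery(new Template(filterStringAsFilter, ScriptType.INLINE,
                    MustacheScriptEngineService.NAME, null, null));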

@Test
public void testThatRelevantHttpHeadersBecomeRequestHeaders() throws Exception {
    String releventHeaderName = "relevant_" + randomHeaderKey;