Merge pull request #21393 from masaruh/alias_boost
Resolve index names in indices_boost
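Before this change, entries in `indices_boost` were matched against the concrete index name of each shard, so boosts keyed by an alias or wildcard expression never took effect. The diff below moves the work to the coordinating node: every `indices_boost` entry is expanded to concrete indices, the first boost that matches an index wins, and each shard request carries its resolved boost (defaulting to 1.0). The following minimal sketch illustrates only that resolution order; `resolveToConcrete` and the index names are hypothetical stand-ins, and the real code resolves through `IndexNameExpressionResolver.concreteIndices` and keys the map by index UUID.

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class IndexBoostResolutionSketch {

    public static void main(String[] args) {
        // indices_boost entries as they appear in the search source, in request order
        Map<String, Float> requested = new LinkedHashMap<>();
        requested.put("alias1", 1.4f);   // alias resolving to index1
        requested.put("index*", 1.3f);   // wildcard matching index1 and index2

        // coordinating node: expand each expression and keep the first boost per concrete index
        Map<String, Float> concreteBoosts = new HashMap<>();
        for (Map.Entry<String, Float> entry : requested.entrySet()) {
            for (String concreteIndex : resolveToConcrete(entry.getKey())) {
                concreteBoosts.putIfAbsent(concreteIndex, entry.getValue());
            }
        }

        // shard level: anything without a resolved boost falls back to the default of 1.0
        System.out.println(concreteBoosts.getOrDefault("index1", 1.0f)); // 1.4 (alias1 matched first)
        System.out.println(concreteBoosts.getOrDefault("index2", 1.0f)); // 1.3 (only index* matched)
        System.out.println(concreteBoosts.getOrDefault("index3", 1.0f)); // 1.0 (no match, default)
    }

    // hypothetical resolver: pretend alias1 -> index1 and index* -> {index1, index2}
    private static List<String> resolveToConcrete(String expression) {
        if ("alias1".equals(expression)) {
            return Arrays.asList("index1");
        }
        return Arrays.asList("index1", "index2");
    }
}

Keeping `putIfAbsent` rather than `put` is what makes the first matching entry take precedence, which the updated documentation at the bottom of this diff spells out.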
commit a0185c83a7
@@ -50,6 +50,7 @@ import java.util.function.Function;
abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult> extends AbstractAsyncAction {
    private static final float DEFAULT_INDEX_BOOST = 1.0f;

    protected final Logger logger;
    protected final SearchTransportService searchTransportService;
@@ -66,6 +67,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
    private final AtomicInteger totalOps = new AtomicInteger();
    protected final AtomicArray<FirstResult> firstResults;
    private final Map<String, AliasFilter> aliasFilter;
    private final Map<String, Float> concreteIndexBoosts;
    private final long clusterStateVersion;
    private volatile AtomicArray<ShardSearchFailure> shardFailures;
    private final Object shardFailuresMutex = new Object();
@@ -73,9 +75,9 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>

    protected AbstractSearchAsyncAction(Logger logger, SearchTransportService searchTransportService,
                                        Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
                                        Map<String, AliasFilter> aliasFilter, Executor executor, SearchRequest request,
                                        ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts, long startTime,
                                        long clusterStateVersion, SearchTask task) {
                                        Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
                                        Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
                                        GroupShardsIterator shardsIts, long startTime, long clusterStateVersion, SearchTask task) {
        super(startTime);
        this.logger = logger;
        this.searchTransportService = searchTransportService;
@@ -91,6 +93,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
        expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
        firstResults = new AtomicArray<>(shardsIts.size());
        this.aliasFilter = aliasFilter;
        this.concreteIndexBoosts = concreteIndexBoosts;
    }

    public void start() {
@@ -125,8 +128,10 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
        } else {
            AliasFilter filter = this.aliasFilter.get(shard.index().getUUID());
            assert filter != null;

            float indexBoost = concreteIndexBoosts.getOrDefault(shard.index().getUUID(), DEFAULT_INDEX_BOOST);
            ShardSearchTransportRequest transportRequest = new ShardSearchTransportRequest(request, shardIt.shardId(), shardsIts.size(),
                filter, startTime());
                filter, indexBoost, startTime());
            sendExecuteFirstPhase(node, transportRequest , new ActionListener<FirstResult>() {
                @Override
                public void onResponse(FirstResult result) {
@ -47,10 +47,11 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
|
|||
private final SearchPhaseController searchPhaseController;
|
||||
SearchDfsQueryAndFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
|
||||
Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
|
||||
Map<String, AliasFilter> aliasFilter, SearchPhaseController searchPhaseController,
|
||||
Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
|
||||
GroupShardsIterator shardsIts, long startTime, long clusterStateVersion, SearchTask task) {
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
|
||||
Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
|
||||
SearchPhaseController searchPhaseController, Executor executor, SearchRequest request,
|
||||
ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts,
|
||||
long startTime, long clusterStateVersion, SearchTask task) {
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor,
|
||||
request, listener, shardsIts, startTime, clusterStateVersion, task);
|
||||
this.searchPhaseController = searchPhaseController;
|
||||
queryFetchResults = new AtomicArray<>(firstResults.length());
|
||||
|
|
|
@ -55,11 +55,11 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
|
|||
|
||||
SearchDfsQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
|
||||
Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
|
||||
Map<String, AliasFilter> aliasFilter, SearchPhaseController searchPhaseController,
|
||||
Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
|
||||
GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
|
||||
SearchTask task) {
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
|
||||
Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
|
||||
SearchPhaseController searchPhaseController, Executor executor, SearchRequest request,
|
||||
ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts, long startTime,
|
||||
long clusterStateVersion, SearchTask task) {
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor,
|
||||
request, listener, shardsIts, startTime, clusterStateVersion, task);
|
||||
this.searchPhaseController = searchPhaseController;
|
||||
queryResults = new AtomicArray<>(firstResults.length());
|
||||
|
|
|
@ -40,12 +40,12 @@ class SearchQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<QueryFetc
|
|||
|
||||
SearchQueryAndFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
|
||||
Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
|
||||
Map<String, AliasFilter> aliasFilter,
|
||||
Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
|
||||
SearchPhaseController searchPhaseController, Executor executor,
|
||||
SearchRequest request, ActionListener<SearchResponse> listener,
|
||||
GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
|
||||
SearchTask task) {
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor,
|
||||
request, listener, shardsIts, startTime, clusterStateVersion, task);
|
||||
this.searchPhaseController = searchPhaseController;
|
||||
|
||||
|
|
|
@ -50,13 +50,13 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
|
|||
private final SearchPhaseController searchPhaseController;
|
||||
|
||||
SearchQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
|
||||
Function<String, DiscoveryNode> nodeIdToDiscoveryNode, Map<String,
|
||||
AliasFilter> aliasFilter,
|
||||
Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
|
||||
Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
|
||||
SearchPhaseController searchPhaseController, Executor executor,
|
||||
SearchRequest request, ActionListener<SearchResponse> listener,
|
||||
GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
|
||||
SearchTask task) {
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor, request, listener,
|
||||
super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor, request, listener,
|
||||
shardsIts, startTime, clusterStateVersion, task);
|
||||
this.searchPhaseController = searchPhaseController;
|
||||
fetchResults = new AtomicArray<>(firstResults.length());
|
||||
|
|
|
@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Setting.Property;
|
|||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.index.Index;
|
||||
import org.elasticsearch.search.SearchService;
|
||||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
import org.elasticsearch.search.internal.AliasFilter;
|
||||
import org.elasticsearch.tasks.Task;
|
||||
import org.elasticsearch.threadpool.ThreadPool;
|
||||
|
@@ -84,6 +85,29 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
        return aliasFilterMap;
    }

    private Map<String, Float> resolveIndexBoosts(SearchRequest searchRequest, ClusterState clusterState) {
        if (searchRequest.source() == null) {
            return Collections.emptyMap();
        }

        SearchSourceBuilder source = searchRequest.source();
        if (source.indexBoosts() == null) {
            return Collections.emptyMap();
        }

        Map<String, Float> concreteIndexBoosts = new HashMap<>();
        for (SearchSourceBuilder.IndexBoost ib : source.indexBoosts()) {
            Index[] concreteIndices =
                indexNameExpressionResolver.concreteIndices(clusterState, searchRequest.indicesOptions(), ib.getIndex());

            for (Index concreteIndex : concreteIndices) {
                concreteIndexBoosts.putIfAbsent(concreteIndex.getUUID(), ib.getBoost());
            }
        }

        return Collections.unmodifiableMap(concreteIndexBoosts);
    }

    @Override
    protected void doExecute(Task task, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
        // pure paranoia if time goes backwards we are at least positive
@ -107,6 +131,8 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
|
|||
searchRequest.preference());
|
||||
failIfOverShardCountLimit(clusterService, shardIterators.size());
|
||||
|
||||
Map<String, Float> concreteIndexBoosts = resolveIndexBoosts(searchRequest, clusterState);
|
||||
|
||||
// optimize search type for cases where there is only one shard group to search on
|
||||
if (shardIterators.size() == 1) {
|
||||
// if we only have one group, then we always want Q_A_F, no need for DFS, and no need to do THEN since we hit one shard
|
||||
|
@ -125,7 +151,7 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
|
|||
}
|
||||
|
||||
searchAsyncAction((SearchTask)task, searchRequest, shardIterators, startTimeInMillis, clusterState,
|
||||
Collections.unmodifiableMap(aliasFilter), listener).start();
|
||||
Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, listener).start();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -135,6 +161,7 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
|
|||
|
||||
private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators,
|
||||
long startTime, ClusterState state, Map<String, AliasFilter> aliasFilter,
|
||||
Map<String, Float> concreteIndexBoosts,
|
||||
ActionListener<SearchResponse> listener) {
|
||||
final Function<String, DiscoveryNode> nodesLookup = state.nodes()::get;
|
||||
final long clusterStateVersion = state.version();
|
||||
|
@ -143,22 +170,22 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
|
|||
switch(searchRequest.searchType()) {
|
||||
case DFS_QUERY_THEN_FETCH:
|
||||
searchAsyncAction = new SearchDfsQueryThenFetchAsyncAction(logger, searchTransportService, nodesLookup,
|
||||
aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
clusterStateVersion, task);
|
||||
break;
|
||||
case QUERY_THEN_FETCH:
|
||||
searchAsyncAction = new SearchQueryThenFetchAsyncAction(logger, searchTransportService, nodesLookup,
|
||||
aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
clusterStateVersion, task);
|
||||
break;
|
||||
case DFS_QUERY_AND_FETCH:
|
||||
searchAsyncAction = new SearchDfsQueryAndFetchAsyncAction(logger, searchTransportService, nodesLookup,
|
||||
aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
clusterStateVersion, task);
|
||||
break;
|
||||
case QUERY_AND_FETCH:
|
||||
searchAsyncAction = new SearchQueryAndFetchAsyncAction(logger, searchTransportService, nodesLookup,
|
||||
aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
|
||||
clusterStateVersion, task);
|
||||
break;
|
||||
default:
|
||||
|
@ -177,5 +204,4 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
|
|||
+ "] to a greater value if you really want to query that many shards at the same time.");
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -96,7 +96,7 @@ final class DefaultSearchContext extends SearchContext {
|
|||
private final DfsSearchResult dfsResult;
|
||||
private final QuerySearchResult queryResult;
|
||||
private final FetchSearchResult fetchResult;
|
||||
private float queryBoost = 1.0f;
|
||||
private final float queryBoost;
|
||||
private TimeValue timeout;
|
||||
// terminate after count
|
||||
private int terminateAfter = DEFAULT_TERMINATE_AFTER;
|
||||
|
@ -173,6 +173,7 @@ final class DefaultSearchContext extends SearchContext {
|
|||
this.timeout = timeout;
|
||||
queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher.getIndexReader(), request::nowInMillis);
|
||||
queryShardContext.setTypes(request.types());
|
||||
queryBoost = request.indexBoost();
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -352,12 +353,6 @@ final class DefaultSearchContext extends SearchContext {
|
|||
return queryBoost;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext queryBoost(float queryBoost) {
|
||||
this.queryBoost = queryBoost;
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getOriginNanoTime() {
|
||||
return originNanoTime;
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
|
||||
package org.elasticsearch.search;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectFloatHashMap;
|
||||
import org.apache.lucene.search.FieldDoc;
|
||||
import org.apache.lucene.search.TopDocs;
|
||||
import org.apache.lucene.util.IOUtils;
|
||||
|
@ -671,13 +670,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
|
|||
QueryShardContext queryShardContext = context.getQueryShardContext();
|
||||
context.from(source.from());
|
||||
context.size(source.size());
|
||||
ObjectFloatHashMap<String> indexBoostMap = source.indexBoost();
|
||||
if (indexBoostMap != null) {
|
||||
Float indexBoost = indexBoostMap.get(context.shardTarget().index());
|
||||
if (indexBoost != null) {
|
||||
context.queryBoost(indexBoost);
|
||||
}
|
||||
}
|
||||
Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
|
||||
if (source.query() != null) {
|
||||
InnerHitBuilder.extractInnerHits(source.query(), innerHitBuilders);
|
||||
|
|
|
@ -19,16 +19,16 @@
|
|||
|
||||
package org.elasticsearch.search.builder;
|
||||
|
||||
import com.carrotsearch.hppc.ObjectFloatHashMap;
|
||||
import org.elasticsearch.action.support.ToXContentToBytes;
|
||||
import org.elasticsearch.common.Nullable;
|
||||
import org.elasticsearch.common.ParseField;
|
||||
import org.elasticsearch.common.ParsingException;
|
||||
import org.elasticsearch.common.Strings;
|
||||
import org.elasticsearch.common.collect.Tuple;
|
||||
import org.elasticsearch.common.io.stream.StreamInput;
|
||||
import org.elasticsearch.common.io.stream.StreamOutput;
|
||||
import org.elasticsearch.common.io.stream.Writeable;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.logging.Loggers;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.common.xcontent.ToXContent;
|
||||
import org.elasticsearch.common.xcontent.XContentBuilder;
|
||||
|
@ -63,10 +63,6 @@ import java.util.ArrayList;
|
|||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.StreamSupport;
|
||||
|
||||
import static org.elasticsearch.common.collect.Tuple.tuple;
|
||||
|
||||
/**
|
||||
* A search source builder allowing to easily build search source. Simple
|
||||
|
@ -76,6 +72,8 @@ import static org.elasticsearch.common.collect.Tuple.tuple;
|
|||
* @see org.elasticsearch.action.search.SearchRequest#source(SearchSourceBuilder)
|
||||
*/
|
||||
public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable {
|
||||
private static final DeprecationLogger DEPRECATION_LOGGER =
|
||||
new DeprecationLogger(Loggers.getLogger(SearchSourceBuilder.class));
|
||||
|
||||
public static final ParseField FROM_FIELD = new ParseField("from");
|
||||
public static final ParseField SIZE_FIELD = new ParseField("size");
|
||||
|
@ -167,7 +165,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
|
||||
private List<RescoreBuilder> rescoreBuilders;
|
||||
|
||||
private ObjectFloatHashMap<String> indexBoost = null;
|
||||
private List<IndexBoost> indexBoosts = new ArrayList<>();
|
||||
|
||||
private List<String> stats;
|
||||
|
||||
|
@ -193,13 +191,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
|
||||
from = in.readVInt();
|
||||
highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
|
||||
int indexBoostSize = in.readVInt();
|
||||
if (indexBoostSize > 0) {
|
||||
indexBoost = new ObjectFloatHashMap<>(indexBoostSize);
|
||||
for (int i = 0; i < indexBoostSize; i++) {
|
||||
indexBoost.put(in.readString(), in.readFloat());
|
||||
}
|
||||
}
|
||||
indexBoosts = in.readList(IndexBoost::new);
|
||||
minScore = in.readOptionalFloat();
|
||||
postQueryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
|
||||
queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
|
||||
|
@ -240,11 +232,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
out.writeOptionalWriteable(storedFieldsContext);
|
||||
out.writeVInt(from);
|
||||
out.writeOptionalWriteable(highlightBuilder);
|
||||
int indexBoostSize = indexBoost == null ? 0 : indexBoost.size();
|
||||
out.writeVInt(indexBoostSize);
|
||||
if (indexBoostSize > 0) {
|
||||
writeIndexBoost(out);
|
||||
}
|
||||
out.writeList(indexBoosts);
|
||||
out.writeOptionalFloat(minScore);
|
||||
out.writeOptionalNamedWriteable(postQueryBuilder);
|
||||
out.writeOptionalNamedWriteable(queryBuilder);
|
||||
|
@ -283,17 +271,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
out.writeOptionalWriteable(sliceBuilder);
|
||||
}
|
||||
|
||||
private void writeIndexBoost(StreamOutput out) throws IOException {
|
||||
List<Tuple<String, Float>> ibs = StreamSupport
|
||||
.stream(indexBoost.spliterator(), false)
|
||||
.map(i -> tuple(i.key, i.value)).sorted((o1, o2) -> o1.v1().compareTo(o2.v1()))
|
||||
.collect(Collectors.toList());
|
||||
for (Tuple<String, Float> ib : ibs) {
|
||||
out.writeString(ib.v1());
|
||||
out.writeFloat(ib.v2());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sets the search query for this request.
|
||||
*
|
||||
|
@@ -816,28 +793,26 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
    }

    /**
     * Sets the boost a specific index will receive when the query is executed
     * Sets the boost a specific index or alias will receive when the query is executed
     * against it.
     *
     * @param index
     *            The index to apply the boost against
     *            The index or alias to apply the boost against
     * @param indexBoost
     *            The boost to apply to the index
     */
    public SearchSourceBuilder indexBoost(String index, float indexBoost) {
        if (this.indexBoost == null) {
            this.indexBoost = new ObjectFloatHashMap<>();
        }
        this.indexBoost.put(index, indexBoost);
        Objects.requireNonNull(index, "index must not be null");
        this.indexBoosts.add(new IndexBoost(index, indexBoost));
        return this;
    }

    /**
     * Gets the boost a specific indices will receive when the query is
     * Gets the boost a specific indices or aliases will receive when the query is
     * executed against them.
     */
    public ObjectFloatHashMap<String> indexBoost() {
        return indexBoost;
    public List<IndexBoost> indexBoosts() {
        return indexBoosts;
    }

    /**
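The javadoc above now allows aliases and wildcard expressions, and the setter simply appends `IndexBoost` entries in call order instead of filling an `ObjectFloatHashMap`. A short usage sketch against the 5.2-era builder API shown in this diff (the index names are made up for illustration):

import org.elasticsearch.search.builder.SearchSourceBuilder;

public class IndexBoostBuilderExample {
    public static void main(String[] args) {
        // aliases and wildcard expressions are now accepted; they are resolved later
        // on the coordinating node, not here
        SearchSourceBuilder source = new SearchSourceBuilder()
            .indexBoost("alias1", 1.4f)
            .indexBoost("index*", 1.3f);

        // entries keep insertion order, which is what gives "first match wins" semantics
        for (SearchSourceBuilder.IndexBoost ib : source.indexBoosts()) {
            System.out.println(ib.getIndex() + " -> " + ib.getBoost());
        }
    }
}

Because the list preserves insertion order, callers control which boost wins when an index is matched by more than one entry.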
|
@ -916,7 +891,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
rewrittenBuilder.storedFieldsContext = storedFieldsContext;
|
||||
rewrittenBuilder.from = from;
|
||||
rewrittenBuilder.highlightBuilder = highlightBuilder;
|
||||
rewrittenBuilder.indexBoost = indexBoost;
|
||||
rewrittenBuilder.indexBoosts = indexBoosts;
|
||||
rewrittenBuilder.minScore = minScore;
|
||||
rewrittenBuilder.postQueryBuilder = postQueryBuilder;
|
||||
rewrittenBuilder.profile = profile;
|
||||
|
@ -1002,15 +977,16 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
scriptFields.add(new ScriptField(context));
|
||||
}
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
|
||||
indexBoost = new ObjectFloatHashMap<>();
|
||||
DEPRECATION_LOGGER.deprecated(
|
||||
"Object format in indices_boost is deprecated, please use array format instead");
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
currentFieldName = parser.currentName();
|
||||
} else if (token.isValue()) {
|
||||
indexBoost.put(currentFieldName, parser.floatValue());
|
||||
indexBoosts.add(new IndexBoost(currentFieldName, parser.floatValue()));
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token +
|
||||
" in [" + currentFieldName + "].", parser.getTokenLocation());
|
||||
" in [" + currentFieldName + "].", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, AGGREGATIONS_FIELD)
|
||||
|
@ -1059,9 +1035,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
docValueFields.add(parser.text());
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_STRING +
|
||||
"] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
"] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
|
||||
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
|
||||
indexBoosts.add(new IndexBoost(context));
|
||||
}
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
|
||||
sorts = new ArrayList<>(SortBuilder.fromXContent(context));
|
||||
} else if (context.getParseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) {
|
||||
|
@ -1191,18 +1171,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
builder.field(SLICE.getPreferredName(), sliceBuilder);
|
||||
}
|
||||
|
||||
if (indexBoost != null) {
|
||||
builder.startObject(INDICES_BOOST_FIELD.getPreferredName());
|
||||
assert !indexBoost.containsKey(null);
|
||||
final Object[] keys = indexBoost.keys;
|
||||
final float[] values = indexBoost.values;
|
||||
for (int i = 0; i < keys.length; i++) {
|
||||
if (keys[i] != null) {
|
||||
builder.field((String) keys[i], values[i]);
|
||||
}
|
||||
}
|
||||
builder.startArray(INDICES_BOOST_FIELD.getPreferredName());
|
||||
for (IndexBoost ib : indexBoosts) {
|
||||
builder.startObject();
|
||||
builder.field(ib.index, ib.boost);
|
||||
builder.endObject();
|
||||
}
|
||||
builder.endArray();
|
||||
|
||||
if (aggregations != null) {
|
||||
builder.field(AGGREGATIONS_FIELD.getPreferredName(), aggregations);
|
||||
|
@ -1237,6 +1212,91 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
}
|
||||
}
|
||||
|
||||
public static class IndexBoost implements Writeable, ToXContent {
|
||||
private final String index;
|
||||
private final float boost;
|
||||
|
||||
IndexBoost(String index, float boost) {
|
||||
this.index = index;
|
||||
this.boost = boost;
|
||||
}
|
||||
|
||||
IndexBoost(StreamInput in) throws IOException {
|
||||
index = in.readString();
|
||||
boost = in.readFloat();
|
||||
}
|
||||
|
||||
IndexBoost(QueryParseContext context) throws IOException {
|
||||
XContentParser parser = context.parser();
|
||||
XContentParser.Token token = parser.currentToken();
|
||||
|
||||
if (token == XContentParser.Token.START_OBJECT) {
|
||||
token = parser.nextToken();
|
||||
if (token == XContentParser.Token.FIELD_NAME) {
|
||||
index = parser.currentName();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.FIELD_NAME +
|
||||
"] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
token = parser.nextToken();
|
||||
if (token == XContentParser.Token.VALUE_NUMBER) {
|
||||
boost = parser.floatValue();
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_NUMBER +
|
||||
"] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
token = parser.nextToken();
|
||||
if (token != XContentParser.Token.END_OBJECT) {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.END_OBJECT +
|
||||
"] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
} else {
|
||||
throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT +
|
||||
"] in [" + parser.currentName() + "] but found [" + token + "]", parser.getTokenLocation());
|
||||
}
|
||||
}
|
||||
|
||||
public String getIndex() {
|
||||
return index;
|
||||
}
|
||||
|
||||
public float getBoost() {
|
||||
return boost;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void writeTo(StreamOutput out) throws IOException {
|
||||
out.writeString(index);
|
||||
out.writeFloat(boost);
|
||||
}
|
||||
|
||||
@Override
|
||||
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
|
||||
builder.startObject();
|
||||
builder.field(index, boost);
|
||||
builder.endObject();
|
||||
return builder;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(index, boost);
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object obj) {
|
||||
if (obj == null) {
|
||||
return false;
|
||||
}
|
||||
if (getClass() != obj.getClass()) {
|
||||
return false;
|
||||
}
|
||||
IndexBoost other = (IndexBoost) obj;
|
||||
return Objects.equals(index, other.index)
|
||||
&& Objects.equals(boost, other.boost);
|
||||
}
|
||||
|
||||
}
|
||||
public static class ScriptField implements Writeable, ToXContent {
|
||||
|
||||
private final boolean ignoreFailure;
|
||||
|
@ -1352,8 +1412,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
@Override
|
||||
public int hashCode() {
|
||||
return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder,
|
||||
indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder,
|
||||
sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile, extBuilders);
|
||||
indexBoosts, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size,
|
||||
sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version,
|
||||
profile, extBuilders);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -1372,7 +1433,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
|
|||
&& Objects.equals(storedFieldsContext, other.storedFieldsContext)
|
||||
&& Objects.equals(from, other.from)
|
||||
&& Objects.equals(highlightBuilder, other.highlightBuilder)
|
||||
&& Objects.equals(indexBoost, other.indexBoost)
|
||||
&& Objects.equals(indexBoosts, other.indexBoosts)
|
||||
&& Objects.equals(minScore, other.minScore)
|
||||
&& Objects.equals(postQueryBuilder, other.postQueryBuilder)
|
||||
&& Objects.equals(queryBuilder, other.queryBuilder)
|
||||
|
|
|
@ -37,7 +37,6 @@ import org.elasticsearch.index.query.ParsedQuery;
|
|||
import org.elasticsearch.index.query.QueryShardContext;
|
||||
import org.elasticsearch.index.shard.IndexShard;
|
||||
import org.elasticsearch.index.similarity.SimilarityService;
|
||||
import org.elasticsearch.script.ScriptService;
|
||||
import org.elasticsearch.search.SearchExtBuilder;
|
||||
import org.elasticsearch.search.SearchShardTarget;
|
||||
import org.elasticsearch.search.aggregations.SearchContextAggregations;
|
||||
|
@ -144,11 +143,6 @@ public abstract class FilteredSearchContext extends SearchContext {
|
|||
return in.queryBoost();
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext queryBoost(float queryBoost) {
|
||||
return in.queryBoost(queryBoost);
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getOriginNanoTime() {
|
||||
return in.getOriginNanoTime();
|
||||
|
|
|
@ -148,8 +148,6 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
|
|||
|
||||
public abstract float queryBoost();
|
||||
|
||||
public abstract SearchContext queryBoost(float queryBoost);
|
||||
|
||||
public abstract long getOriginNanoTime();
|
||||
|
||||
public abstract ScrollContext scrollContext();
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
|
||||
package org.elasticsearch.search.internal;
|
||||
|
||||
import org.elasticsearch.Version;
|
||||
import org.elasticsearch.action.search.SearchRequest;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.common.Strings;
|
||||
|
@ -34,6 +35,7 @@ import org.elasticsearch.search.Scroll;
|
|||
import org.elasticsearch.search.builder.SearchSourceBuilder;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Optional;
|
||||
|
||||
/**
|
||||
* Shard level search request that gets created and consumed on the local node.
|
||||
|
@ -63,6 +65,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
|
|||
private Scroll scroll;
|
||||
private String[] types = Strings.EMPTY_ARRAY;
|
||||
private AliasFilter aliasFilter;
|
||||
private float indexBoost;
|
||||
private SearchSourceBuilder source;
|
||||
private Boolean requestCache;
|
||||
private long nowInMillis;
|
||||
|
@ -73,9 +76,9 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
|
|||
}
|
||||
|
||||
ShardSearchLocalRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards,
|
||||
AliasFilter aliasFilter, long nowInMillis) {
|
||||
AliasFilter aliasFilter, float indexBoost, long nowInMillis) {
|
||||
this(shardId, numberOfShards, searchRequest.searchType(),
|
||||
searchRequest.source(), searchRequest.types(), searchRequest.requestCache(), aliasFilter);
|
||||
searchRequest.source(), searchRequest.types(), searchRequest.requestCache(), aliasFilter, indexBoost);
|
||||
this.scroll = searchRequest.scroll();
|
||||
this.nowInMillis = nowInMillis;
|
||||
}
|
||||
|
@ -85,10 +88,11 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
|
|||
this.nowInMillis = nowInMillis;
|
||||
this.aliasFilter = aliasFilter;
|
||||
this.shardId = shardId;
|
||||
indexBoost = 1.0f;
|
||||
}
|
||||
|
||||
public ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType searchType, SearchSourceBuilder source, String[] types,
|
||||
Boolean requestCache, AliasFilter aliasFilter) {
|
||||
Boolean requestCache, AliasFilter aliasFilter, float indexBoost) {
|
||||
this.shardId = shardId;
|
||||
this.numberOfShards = numberOfShards;
|
||||
this.searchType = searchType;
|
||||
|
@ -96,6 +100,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
|
|||
this.types = types;
|
||||
this.requestCache = requestCache;
|
||||
this.aliasFilter = aliasFilter;
|
||||
this.indexBoost = indexBoost;
|
||||
}
|
||||
|
||||
|
||||
|
@ -134,6 +139,11 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
|
|||
return aliasFilter.getQueryBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public float indexBoost() {
|
||||
return indexBoost;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nowInMillis() {
|
||||
return nowInMillis;
|
||||
|
@@ -167,6 +177,20 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
        source = in.readOptionalWriteable(SearchSourceBuilder::new);
        types = in.readStringArray();
        aliasFilter = new AliasFilter(in);
        if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
            indexBoost = in.readFloat();
        } else {
            // Nodes < 5.2.0 doesn't send index boost. Read it from source.
            if (source != null) {
                Optional<SearchSourceBuilder.IndexBoost> boost = source.indexBoosts()
                    .stream()
                    .filter(ib -> ib.getIndex().equals(shardId.getIndexName()))
                    .findFirst();
                indexBoost = boost.isPresent() ? boost.get().getBoost() : 1.0f;
            } else {
                indexBoost = 1.0f;
            }
        }
        nowInMillis = in.readVLong();
        requestCache = in.readOptionalBoolean();
    }
@@ -181,6 +205,9 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
        out.writeOptionalWriteable(source);
        out.writeStringArray(types);
        aliasFilter.writeTo(out);
        if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
            out.writeFloat(indexBoost);
        }
        if (!asKey) {
            out.writeVLong(nowInMillis);
        }
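The `readFrom`/`writeTo` changes above gate the new `indexBoost` field on `Version.V_5_2_0_UNRELEASED`; when the remote node is older, the reader falls back to scanning `source.indexBoosts()` for the shard's index name. A stand-alone sketch of just the version-gating half of that pattern, with plain `java.io` streams and an `int` as hypothetical stand-ins for Elasticsearch's `StreamInput`/`StreamOutput` and `Version` (the source-scanning fallback is omitted):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class VersionGatedFieldSketch {
    static final int V_5_2_0 = 5020099; // hypothetical numeric version id

    static void write(DataOutputStream out, int peerVersion, float indexBoost) throws IOException {
        if (peerVersion >= V_5_2_0) {
            out.writeFloat(indexBoost); // new field, only sent to peers that understand it
        }
    }

    static float read(DataInputStream in, int peerVersion) throws IOException {
        if (peerVersion >= V_5_2_0) {
            return in.readFloat();
        }
        return 1.0f; // older peer never sent it; fall back to the default boost
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        write(new DataOutputStream(bytes), V_5_2_0, 2.0f);
        float boost = read(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())), V_5_2_0);
        System.out.println(boost); // 2.0
    }
}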
@ -61,6 +61,8 @@ public interface ShardSearchRequest {
|
|||
|
||||
QueryBuilder filteringAliases();
|
||||
|
||||
float indexBoost();
|
||||
|
||||
long nowInMillis();
|
||||
|
||||
Boolean requestCache();
|
||||
|
|
|
@ -54,8 +54,8 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
|
|||
}
|
||||
|
||||
public ShardSearchTransportRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards,
|
||||
AliasFilter aliasFilter, long nowInMillis) {
|
||||
this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, nowInMillis);
|
||||
AliasFilter aliasFilter, float indexBoost, long nowInMillis) {
|
||||
this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, indexBoost, nowInMillis);
|
||||
this.originalIndices = new OriginalIndices(searchRequest);
|
||||
}
|
||||
|
||||
|
@ -111,6 +111,11 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
|
|||
return shardSearchLocalRequest.filteringAliases();
|
||||
}
|
||||
|
||||
@Override
|
||||
public float indexBoost() {
|
||||
return shardSearchLocalRequest.indexBoost();
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nowInMillis() {
|
||||
return shardSearchLocalRequest.nowInMillis();
|
||||
|
|
|
@ -22,14 +22,13 @@ import org.apache.lucene.search.Query;
|
|||
import org.apache.lucene.util.Counter;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.index.query.ParsedQuery;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.search.aggregations.SearchContextAggregations;
|
||||
import org.elasticsearch.search.fetch.FetchSearchResult;
|
||||
import org.elasticsearch.search.fetch.StoredFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
|
||||
import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
|
||||
import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
|
||||
import org.elasticsearch.search.lookup.SearchLookup;
|
||||
import org.elasticsearch.search.query.QuerySearchResult;
|
||||
import org.elasticsearch.search.rescore.RescoreSearchContext;
|
||||
import org.elasticsearch.search.sort.SortAndFormats;
|
||||
|
@ -85,11 +84,6 @@ public class SubSearchContext extends FilteredSearchContext {
|
|||
throw new UnsupportedOperationException("this context should be read only");
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext queryBoost(float queryBoost) {
|
||||
throw new UnsupportedOperationException("Not supported");
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext scrollContext(ScrollContext scrollContext) {
|
||||
throw new UnsupportedOperationException("Not supported");
|
||||
|
|
|
@ -88,7 +88,7 @@ public class SearchAsyncActionTests extends ESTestCase {
|
|||
lookup.put(primaryNode.getId(), primaryNode);
|
||||
Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
|
||||
AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<TestSearchPhaseResult>(logger, transportService, lookup::get,
|
||||
aliasFilters, null, request, responseListener, shardsIter, 0, 0, null) {
|
||||
aliasFilters, Collections.emptyMap(), null, request, responseListener, shardsIter, 0, 0, null) {
|
||||
TestSearchResponse response = new TestSearchResponse();
|
||||
|
||||
@Override
|
||||
|
|
|
@ -87,6 +87,11 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
|
|||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public float indexBoost() {
|
||||
return 1.0f;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long nowInMillis() {
|
||||
return 0;
|
||||
|
|
|
@ -186,7 +186,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
|
|||
try {
|
||||
QuerySearchResultProvider querySearchResultProvider = service.executeQueryPhase(
|
||||
new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT,
|
||||
new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY)),
|
||||
new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f),
|
||||
new SearchTask(123L, "", "", "", null));
|
||||
IntArrayList intCursors = new IntArrayList(1);
|
||||
intCursors.add(0);
|
||||
|
@ -221,7 +221,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
|
|||
new SearchSourceBuilder(),
|
||||
new String[0],
|
||||
false,
|
||||
new AliasFilter(null, Strings.EMPTY_ARRAY)),
|
||||
new AliasFilter(null, Strings.EMPTY_ARRAY),
|
||||
1.0f),
|
||||
null);
|
||||
// the search context should inherit the default timeout
|
||||
assertThat(contextWithDefaultTimeout.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
|
||||
|
@ -235,7 +236,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
|
|||
new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds)),
|
||||
new String[0],
|
||||
false,
|
||||
new AliasFilter(null, Strings.EMPTY_ARRAY)),
|
||||
new AliasFilter(null, Strings.EMPTY_ARRAY),
|
||||
1.0f),
|
||||
null);
|
||||
// the search context should inherit the query timeout
|
||||
assertThat(context.timeout(), equalTo(TimeValue.timeValueSeconds(seconds)));
|
||||
|
|
|
@ -314,4 +314,78 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
|
|||
assertEquals("query", sourceAsMap.keySet().iterator().next());
|
||||
}
|
||||
}
|
||||
|
||||
public void testParseIndicesBoost() throws IOException {
|
||||
{
|
||||
String restContent = " { \"indices_boost\": {\"foo\": 1.0, \"bar\": 2.0}}";
|
||||
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
|
||||
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
|
||||
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
|
||||
assertEquals(2, searchSourceBuilder.indexBoosts().size());
|
||||
assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
|
||||
assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
String restContent = "{" +
|
||||
" \"indices_boost\" : [\n" +
|
||||
" { \"foo\" : 1.0 },\n" +
|
||||
" { \"bar\" : 2.0 },\n" +
|
||||
" { \"baz\" : 3.0 }\n" +
|
||||
" ]}";
|
||||
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
|
||||
SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
|
||||
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
|
||||
assertEquals(3, searchSourceBuilder.indexBoosts().size());
|
||||
assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
|
||||
assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
|
||||
assertEquals(new SearchSourceBuilder.IndexBoost("baz", 3.0f), searchSourceBuilder.indexBoosts().get(2));
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
String restContent = "{" +
|
||||
" \"indices_boost\" : [\n" +
|
||||
" { \"foo\" : 1.0, \"bar\": 2.0}\n" + // invalid format
|
||||
" ]}";
|
||||
|
||||
assertIndicesBoostParseErrorMessage(restContent, "Expected [END_OBJECT] in [indices_boost] but found [FIELD_NAME]");
|
||||
}
|
||||
|
||||
{
|
||||
String restContent = "{" +
|
||||
" \"indices_boost\" : [\n" +
|
||||
" {}\n" + // invalid format
|
||||
" ]}";
|
||||
|
||||
assertIndicesBoostParseErrorMessage(restContent, "Expected [FIELD_NAME] in [indices_boost] but found [END_OBJECT]");
|
||||
}
|
||||
|
||||
{
|
||||
String restContent = "{" +
|
||||
" \"indices_boost\" : [\n" +
|
||||
" { \"foo\" : \"bar\"}\n" + // invalid format
|
||||
" ]}";
|
||||
|
||||
assertIndicesBoostParseErrorMessage(restContent, "Expected [VALUE_NUMBER] in [indices_boost] but found [VALUE_STRING]");
|
||||
}
|
||||
|
||||
{
|
||||
String restContent = "{" +
|
||||
" \"indices_boost\" : [\n" +
|
||||
" { \"foo\" : {\"bar\": 1}}\n" + // invalid format
|
||||
" ]}";
|
||||
|
||||
assertIndicesBoostParseErrorMessage(restContent, "Expected [VALUE_NUMBER] in [indices_boost] but found [START_OBJECT]");
|
||||
}
|
||||
}
|
||||
|
||||
private void assertIndicesBoostParseErrorMessage(String restContent, String expectedErrorMessage) throws IOException {
|
||||
try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
|
||||
ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser),
|
||||
searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers));
|
||||
assertEquals(expectedErrorMessage, e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,107 +0,0 @@
|
|||
/*
|
||||
* Licensed to Elasticsearch under one or more contributor
|
||||
* license agreements. See the NOTICE file distributed with
|
||||
* this work for additional information regarding copyright
|
||||
* ownership. Elasticsearch licenses this file to you under
|
||||
* the Apache License, Version 2.0 (the "License"); you may
|
||||
* not use this file except in compliance with the License.
|
||||
* You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing,
|
||||
* software distributed under the License is distributed on an
|
||||
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
|
||||
* KIND, either express or implied. See the License for the
|
||||
* specific language governing permissions and limitations
|
||||
* under the License.
|
||||
*/
|
||||
|
||||
package org.elasticsearch.search.indicesboost;
|
||||
|
||||
import org.elasticsearch.action.search.SearchResponse;
|
||||
import org.elasticsearch.action.search.SearchType;
|
||||
import org.elasticsearch.test.ESIntegTestCase;
|
||||
|
||||
import static org.elasticsearch.client.Requests.indexRequest;
|
||||
import static org.elasticsearch.client.Requests.searchRequest;
|
||||
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
|
||||
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
|
||||
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
|
||||
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
|
||||
public class SimpleIndicesBoostSearchIT extends ESIntegTestCase {
|
||||
public void testIndicesBoost() throws Exception {
|
||||
assertHitCount(client().prepareSearch().setQuery(termQuery("test", "value")).get(), 0);
|
||||
|
||||
try {
|
||||
client().prepareSearch("test").setQuery(termQuery("test", "value")).execute().actionGet();
|
||||
fail("should fail");
|
||||
} catch (Exception e) {
|
||||
// ignore, no indices
|
||||
}
|
||||
|
||||
createIndex("test1", "test2");
|
||||
ensureGreen();
|
||||
client().index(indexRequest("test1").type("type1").id("1")
|
||||
.source(jsonBuilder().startObject().field("test", "value check").endObject())).actionGet();
|
||||
client().index(indexRequest("test2").type("type1").id("1")
|
||||
.source(jsonBuilder().startObject().field("test", "value beck").endObject())).actionGet();
|
||||
refresh();
|
||||
|
||||
float indexBoost = 1.1f;
|
||||
|
||||
logger.info("--- QUERY_THEN_FETCH");
|
||||
|
||||
logger.info("Query with test1 boosted");
|
||||
SearchResponse response = client().search(searchRequest()
|
||||
.searchType(SearchType.QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).indexBoost("test1", indexBoost).query(termQuery("test", "value")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(2L));
|
||||
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
|
||||
assertThat(response.getHits().getAt(0).index(), equalTo("test1"));
|
||||
assertThat(response.getHits().getAt(1).index(), equalTo("test2"));
|
||||
|
||||
logger.info("Query with test2 boosted");
|
||||
response = client().search(searchRequest()
|
||||
.searchType(SearchType.QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).indexBoost("test2", indexBoost).query(termQuery("test", "value")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(2L));
|
||||
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
|
||||
assertThat(response.getHits().getAt(0).index(), equalTo("test2"));
|
||||
assertThat(response.getHits().getAt(1).index(), equalTo("test1"));
|
||||
|
||||
logger.info("--- DFS_QUERY_THEN_FETCH");
|
||||
|
||||
logger.info("Query with test1 boosted");
|
||||
response = client().search(searchRequest()
|
||||
.searchType(SearchType.DFS_QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).indexBoost("test1", indexBoost).query(termQuery("test", "value")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(2L));
|
||||
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
|
||||
assertThat(response.getHits().getAt(0).index(), equalTo("test1"));
|
||||
assertThat(response.getHits().getAt(1).index(), equalTo("test2"));
|
||||
|
||||
logger.info("Query with test2 boosted");
|
||||
response = client().search(searchRequest()
|
||||
.searchType(SearchType.DFS_QUERY_THEN_FETCH)
|
||||
.source(searchSource().explain(true).indexBoost("test2", indexBoost).query(termQuery("test", "value")))
|
||||
).actionGet();
|
||||
|
||||
assertThat(response.getHits().totalHits(), equalTo(2L));
|
||||
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
|
||||
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
|
||||
assertThat(response.getHits().getAt(0).index(), equalTo("test2"));
|
||||
assertThat(response.getHits().getAt(1).index(), equalTo("test1"));
|
||||
}
|
||||
}
|
|
@ -81,6 +81,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
|
|||
assertEquals(deserializedRequest.cacheKey(), shardSearchTransportRequest.cacheKey());
|
||||
assertNotSame(deserializedRequest, shardSearchTransportRequest);
|
||||
assertEquals(deserializedRequest.filteringAliases(), shardSearchTransportRequest.filteringAliases());
|
||||
assertEquals(deserializedRequest.indexBoost(), shardSearchTransportRequest.indexBoost(), 0.0f);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -96,7 +97,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
|
|||
filteringAliases = new AliasFilter(null, Strings.EMPTY_ARRAY);
|
||||
}
|
||||
return new ShardSearchTransportRequest(searchRequest, shardId,
|
||||
randomIntBetween(1, 100), filteringAliases, Math.abs(randomLong()));
|
||||
randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), Math.abs(randomLong()));
|
||||
}
|
||||
|
||||
public void testFilteringAliases() throws Exception {
|
||||
|
@ -212,4 +213,24 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
|
|||
}
|
||||
}
|
||||
|
||||
// BWC test for changes from #21393
|
||||
public void testSerialize50RequestForIndexBoost() throws IOException {
|
||||
BytesArray requestBytes = new BytesArray(Base64.getDecoder()
|
||||
// this is a base64 encoded request generated with the same input
|
||||
.decode("AAZpbmRleDEWTjEyM2trbHFUT21XZDY1Z2VDYlo5ZwABBAABAAIA/wD/////DwABBmluZGV4MUAAAAAAAAAAAP////8PAAAAAAAAAgAAAA" +
|
||||
"AAAPa/q8mOKwIAJg=="));
|
||||
|
||||
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
|
||||
in.setVersion(Version.V_5_0_0);
|
||||
ShardSearchTransportRequest readRequest = new ShardSearchTransportRequest();
|
||||
readRequest.readFrom(in);
|
||||
assertEquals(0, in.available());
|
||||
assertEquals(2.0f, readRequest.indexBoost(), 0);
|
||||
|
||||
BytesStreamOutput output = new BytesStreamOutput();
|
||||
output.setVersion(Version.V_5_0_0);
|
||||
readRequest.writeTo(output);
|
||||
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -368,3 +368,18 @@ buildRestTests.setups['range_index'] = '''
        body: |
          {"index":{"_id": 1}}
          {"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}'''

// Used by index boost doc
buildRestTests.setups['index_boost'] = '''
  - do:
      indices.create:
        index: index1
  - do:
      indices.create:
        index: index2

  - do:
      indices.put_alias:
        index: index1
        name: alias1
'''
@@ -6,6 +6,7 @@ across more than one indices. This is very handy when hits coming from
one index matter more than hits coming from another index (think social
graph where each user has an index).

deprecated[5.2.0, This format is deprecated. Please use array format instead.]
[source,js]
--------------------------------------------------
GET /_search
@@ -17,3 +18,23 @@ GET /_search
}
--------------------------------------------------
// CONSOLE
// TEST[setup:index_boost warning:Object format in indices_boost is deprecated, please use array format instead]

You can also specify it as an array to control the order of boosts.

[source,js]
--------------------------------------------------
GET /_search
{
    "indices_boost" : [
        { "alias1" : 1.4 },
        { "index*" : 1.3 }
    ]
}
--------------------------------------------------
// CONSOLE
// TEST[continued]

This is important when you use aliases or wildcard expression.
If multiple matches are found, the first match will be used.
For example, if an index is included in both `alias1` and `index*`, boost value of `1.4` is applied.
@ -0,0 +1,196 @@
|
|||
setup:
|
||||
- do:
|
||||
indices.create:
|
||||
index: test_1
|
||||
- do:
|
||||
indices.create:
|
||||
index: test_2
|
||||
|
||||
- do:
|
||||
indices.put_alias:
|
||||
index: test_1
|
||||
name: alias_1
|
||||
|
||||
- do:
|
||||
indices.put_alias:
|
||||
index: test_2
|
||||
name: alias_2
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_1
|
||||
type: test
|
||||
id: 1
|
||||
body: { foo: bar }
|
||||
|
||||
- do:
|
||||
index:
|
||||
index: test_2
|
||||
type: test
|
||||
id: 1
|
||||
body: { foo: bar }
|
||||
|
||||
- do:
|
||||
indices.refresh:
|
||||
index: [test_1, test_2]
|
||||
|
||||
---
|
||||
"Indices boost using object":
|
||||
- skip:
|
||||
version: " - 5.1.99"
|
||||
reason: deprecation was added in 5.2.0
|
||||
features: "warnings"
|
||||
|
||||
- do:
|
||||
warnings:
|
||||
- 'Object format in indices_boost is deprecated, please use array format instead'
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: {test_1: 2.0, test_2: 1.0}
|
||||
|
||||
- match: { hits.total: 2 }
|
||||
- match: { hits.hits.0._index: test_1 }
|
||||
- match: { hits.hits.1._index: test_2 }
|
||||
|
||||
- do:
|
||||
warnings:
|
||||
- 'Object format in indices_boost is deprecated, please use array format instead'
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: {test_1: 1.0, test_2: 2.0}
|
||||
|
||||
- match: { hits.total: 2 }
|
||||
- match: { hits.hits.0._index: test_2 }
|
||||
- match: { hits.hits.1._index: test_1 }
|
||||
|
||||
---
|
||||
"Indices boost using array":
|
||||
- skip:
|
||||
version: " - 5.1.99"
|
||||
reason: array format was added in 5.2.0
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: [{test_1: 2.0}, {test_2: 1.0}]
|
||||
|
||||
- match: { hits.total: 2 }
|
||||
- match: { hits.hits.0._index: test_1 }
|
||||
- match: { hits.hits.1._index: test_2 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: [{test_1: 1.0}, {test_2: 2.0}]
|
||||
|
||||
- match: { hits.total: 2 }
|
||||
- match: { hits.hits.0._index: test_2 }
|
||||
- match: { hits.hits.1._index: test_1 }
|
||||
|
||||
---
|
||||
"Indices boost using array with alias":
|
||||
- skip:
|
||||
version: " - 5.1.99"
|
||||
reason: array format was added in 5.2.0
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: [{alias_1: 2.0}]
|
||||
|
||||
- match: { hits.total: 2}
|
||||
- match: { hits.hits.0._index: test_1 }
|
||||
- match: { hits.hits.1._index: test_2 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: [{alias_2: 2.0}]
|
||||
|
||||
- match: { hits.total: 2}
|
||||
- match: { hits.hits.0._index: test_2 }
|
||||
- match: { hits.hits.1._index: test_1 }
|
||||
|
||||
---
|
||||
"Indices boost using array with wildcard":
|
||||
- skip:
|
||||
version: " - 5.1.99"
|
||||
reason: array format was added in 5.2.0
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: [{"*_1": 2.0}]
|
||||
|
||||
- match: { hits.total: 2}
|
||||
- match: { hits.hits.0._index: test_1 }
|
||||
- match: { hits.hits.1._index: test_2 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: [{"*_2": 2.0}]
|
||||
|
||||
- match: { hits.total: 2}
|
||||
- match: { hits.hits.0._index: test_2 }
|
||||
- match: { hits.hits.1._index: test_1 }
|
||||
|
||||
---
|
||||
"Indices boost using array multiple match":
|
||||
- skip:
|
||||
version: " - 5.1.99"
|
||||
reason: array format was added in 5.2.0
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
# First match (3.0) is used for test_1
|
||||
indices_boost: [{"*_1": 3.0}, {alias_1: 1.0}, {test_2: 2.0}]
|
||||
|
||||
- match: { hits.total: 2}
|
||||
- match: { hits.hits.0._index: test_1 }
|
||||
- match: { hits.hits.1._index: test_2 }
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
# First match (1.0) is used for test_1
|
||||
indices_boost: [{"*_1": 1.0}, {test_2: 2.0}, {alias_1: 3.0}]
|
||||
|
||||
- match: { hits.total: 2}
|
||||
- match: { hits.hits.0._index: test_2 }
|
||||
- match: { hits.hits.1._index: test_1 }
|
||||
|
||||
---
|
||||
"Indices boost for nonexistent index/alias":
|
||||
- skip:
|
||||
version: " - 5.1.99"
|
||||
reason: array format was added in 5.2.0
|
||||
|
||||
- do:
|
||||
catch: /no such index/
|
||||
search:
|
||||
index: _all
|
||||
body:
|
||||
indices_boost: [{nonexistent: 2.0}, {test_1: 1.0}, {test_2: 2.0}]
|
||||
|
||||
- do:
|
||||
search:
|
||||
index: _all
|
||||
ignore_unavailable: true
|
||||
body:
|
||||
indices_boost: [{nonexistent: 2.0}, {test_1: 1.0}, {test_2: 2.0}]
|
||||
|
||||
- match: { hits.total: 2}
|
||||
- match: { hits.hits.0._index: test_2 }
|
||||
- match: { hits.hits.1._index: test_1 }
|
|
@ -156,11 +156,6 @@ public class TestSearchContext extends SearchContext {
|
|||
return 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public SearchContext queryBoost(float queryBoost) {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getOriginNanoTime() {
|
||||
return originNanoTime;
|
||||
|
|