Resolve index names in indices_boost

This change allows alias and wildcard expressions to be specified in indices_boost, and adds a new format for specifying them: an array of index name/boost pairs. If an index is covered by multiple alias or wildcard expressions, the first matching entry is used. With the new format in place, the old object format is deprecated.
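A sketch of the two request-body formats (the index and alias names here are only illustrative):

Deprecated object format:
    { "indices_boost": { "index1": 1.4, "index2": 1.3 } }

New array format, where each entry may name a concrete index, an alias, or a wildcard expression:
    { "indices_boost": [ { "alias1": 1.4 }, { "index*": 1.3 } ] }

If a concrete index is matched by more than one entry (for example an index behind alias1 that also matches index*), the boost of the first matching entry applies.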

Closes #4756
Masaru Hasegawa 2016-11-07 17:20:06 +09:00
parent 20ff703e07
commit 3df2a086d4
25 changed files with 555 additions and 233 deletions


@@ -50,6 +50,7 @@ import java.util.function.Function;
 abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult> extends AbstractAsyncAction {
+    private static final float DEFAULT_INDEX_BOOST = 1.0f;
     protected final Logger logger;
     protected final SearchTransportService searchTransportService;
@@ -66,6 +67,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
     private final AtomicInteger totalOps = new AtomicInteger();
     protected final AtomicArray<FirstResult> firstResults;
     private final Map<String, AliasFilter> aliasFilter;
+    private final Map<String, Float> concreteIndexBoosts;
     private final long clusterStateVersion;
     private volatile AtomicArray<ShardSearchFailure> shardFailures;
     private final Object shardFailuresMutex = new Object();
@@ -73,9 +75,9 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
     protected AbstractSearchAsyncAction(Logger logger, SearchTransportService searchTransportService,
                                         Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
-                                        Map<String, AliasFilter> aliasFilter, Executor executor, SearchRequest request,
-                                        ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts, long startTime,
-                                        long clusterStateVersion, SearchTask task) {
+                                        Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
+                                        Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
+                                        GroupShardsIterator shardsIts, long startTime, long clusterStateVersion, SearchTask task) {
         super(startTime);
         this.logger = logger;
         this.searchTransportService = searchTransportService;
@@ -91,6 +93,7 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
         expectedTotalOps = shardsIts.totalSizeWith1ForEmpty();
         firstResults = new AtomicArray<>(shardsIts.size());
         this.aliasFilter = aliasFilter;
+        this.concreteIndexBoosts = concreteIndexBoosts;
     }

     public void start() {
@@ -125,8 +128,10 @@ abstract class AbstractSearchAsyncAction<FirstResult extends SearchPhaseResult>
         } else {
             AliasFilter filter = this.aliasFilter.get(shard.index().getUUID());
             assert filter != null;
+            float indexBoost = concreteIndexBoosts.getOrDefault(shard.index().getUUID(), DEFAULT_INDEX_BOOST);
             ShardSearchTransportRequest transportRequest = new ShardSearchTransportRequest(request, shardIt.shardId(), shardsIts.size(),
-                filter, startTime());
+                filter, indexBoost, startTime());
             sendExecuteFirstPhase(node, transportRequest , new ActionListener<FirstResult>() {
                 @Override
                 public void onResponse(FirstResult result) {


@@ -47,10 +47,11 @@ class SearchDfsQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<DfsSea
     private final SearchPhaseController searchPhaseController;

     SearchDfsQueryAndFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
                                       Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
-                                      Map<String, AliasFilter> aliasFilter, SearchPhaseController searchPhaseController,
-                                      Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
-                                      GroupShardsIterator shardsIts, long startTime, long clusterStateVersion, SearchTask task) {
-        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
+                                      Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
+                                      SearchPhaseController searchPhaseController, Executor executor, SearchRequest request,
+                                      ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts,
+                                      long startTime, long clusterStateVersion, SearchTask task) {
+        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor,
             request, listener, shardsIts, startTime, clusterStateVersion, task);
         this.searchPhaseController = searchPhaseController;
         queryFetchResults = new AtomicArray<>(firstResults.length());


@@ -55,11 +55,11 @@ class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<DfsSe
     SearchDfsQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
                                        Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
-                                       Map<String, AliasFilter> aliasFilter, SearchPhaseController searchPhaseController,
-                                       Executor executor, SearchRequest request, ActionListener<SearchResponse> listener,
-                                       GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
-                                       SearchTask task) {
-        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
+                                       Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
+                                       SearchPhaseController searchPhaseController, Executor executor, SearchRequest request,
+                                       ActionListener<SearchResponse> listener, GroupShardsIterator shardsIts, long startTime,
+                                       long clusterStateVersion, SearchTask task) {
+        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor,
             request, listener, shardsIts, startTime, clusterStateVersion, task);
         this.searchPhaseController = searchPhaseController;
         queryResults = new AtomicArray<>(firstResults.length());


@@ -40,12 +40,12 @@ class SearchQueryAndFetchAsyncAction extends AbstractSearchAsyncAction<QueryFetc
     SearchQueryAndFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
                                    Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
-                                   Map<String, AliasFilter> aliasFilter,
+                                   Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
                                    SearchPhaseController searchPhaseController, Executor executor,
                                    SearchRequest request, ActionListener<SearchResponse> listener,
                                    GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
                                    SearchTask task) {
-        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor,
+        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor,
             request, listener, shardsIts, startTime, clusterStateVersion, task);
         this.searchPhaseController = searchPhaseController;


@@ -50,13 +50,13 @@ class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction<QuerySea
     private final SearchPhaseController searchPhaseController;

     SearchQueryThenFetchAsyncAction(Logger logger, SearchTransportService searchTransportService,
-                                    Function<String, DiscoveryNode> nodeIdToDiscoveryNode, Map<String,
-                                    AliasFilter> aliasFilter,
+                                    Function<String, DiscoveryNode> nodeIdToDiscoveryNode,
+                                    Map<String, AliasFilter> aliasFilter, Map<String, Float> concreteIndexBoosts,
                                     SearchPhaseController searchPhaseController, Executor executor,
                                     SearchRequest request, ActionListener<SearchResponse> listener,
                                     GroupShardsIterator shardsIts, long startTime, long clusterStateVersion,
                                     SearchTask task) {
-        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, executor, request, listener,
+        super(logger, searchTransportService, nodeIdToDiscoveryNode, aliasFilter, concreteIndexBoosts, executor, request, listener,
             shardsIts, startTime, clusterStateVersion, task);
         this.searchPhaseController = searchPhaseController;
         fetchResults = new AtomicArray<>(firstResults.length());


@@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Setting.Property;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.index.Index;
 import org.elasticsearch.search.SearchService;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.internal.AliasFilter;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -84,6 +85,29 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         return aliasFilterMap;
     }

+    private Map<String, Float> resolveIndexBoosts(SearchRequest searchRequest, ClusterState clusterState) {
+        if (searchRequest.source() == null) {
+            return Collections.emptyMap();
+        }
+        SearchSourceBuilder source = searchRequest.source();
+        if (source.indexBoosts() == null) {
+            return Collections.emptyMap();
+        }
+        Map<String, Float> concreteIndexBoosts = new HashMap<>();
+        for (SearchSourceBuilder.IndexBoost ib : source.indexBoosts()) {
+            Index[] concreteIndices =
+                indexNameExpressionResolver.concreteIndices(clusterState, searchRequest.indicesOptions(), ib.getIndex());
+            for (Index concreteIndex : concreteIndices) {
+                concreteIndexBoosts.putIfAbsent(concreteIndex.getUUID(), ib.getBoost());
+            }
+        }
+        return Collections.unmodifiableMap(concreteIndexBoosts);
+    }

     @Override
     protected void doExecute(Task task, SearchRequest searchRequest, ActionListener<SearchResponse> listener) {
         // pure paranoia if time goes backwards we are at least positive
@@ -107,6 +131,8 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
             searchRequest.preference());
         failIfOverShardCountLimit(clusterService, shardIterators.size());
+        Map<String, Float> concreteIndexBoosts = resolveIndexBoosts(searchRequest, clusterState);

         // optimize search type for cases where there is only one shard group to search on
         if (shardIterators.size() == 1) {
             // if we only have one group, then we always want Q_A_F, no need for DFS, and no need to do THEN since we hit one shard
@@ -125,7 +151,7 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         }
         searchAsyncAction((SearchTask)task, searchRequest, shardIterators, startTimeInMillis, clusterState,
-            Collections.unmodifiableMap(aliasFilter), listener).start();
+            Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, listener).start();
     }

     @Override
@@ -135,6 +161,7 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
     private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators,
                                                         long startTime, ClusterState state, Map<String, AliasFilter> aliasFilter,
+                                                        Map<String, Float> concreteIndexBoosts,
                                                         ActionListener<SearchResponse> listener) {
         final Function<String, DiscoveryNode> nodesLookup = state.nodes()::get;
         final long clusterStateVersion = state.version();
@@ -143,22 +170,22 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
         switch(searchRequest.searchType()) {
             case DFS_QUERY_THEN_FETCH:
                 searchAsyncAction = new SearchDfsQueryThenFetchAsyncAction(logger, searchTransportService, nodesLookup,
-                    aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
+                    aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
                     clusterStateVersion, task);
                 break;
             case QUERY_THEN_FETCH:
                 searchAsyncAction = new SearchQueryThenFetchAsyncAction(logger, searchTransportService, nodesLookup,
-                    aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
+                    aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
                     clusterStateVersion, task);
                 break;
             case DFS_QUERY_AND_FETCH:
                 searchAsyncAction = new SearchDfsQueryAndFetchAsyncAction(logger, searchTransportService, nodesLookup,
-                    aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
+                    aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
                     clusterStateVersion, task);
                 break;
             case QUERY_AND_FETCH:
                 searchAsyncAction = new SearchQueryAndFetchAsyncAction(logger, searchTransportService, nodesLookup,
-                    aliasFilter, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
+                    aliasFilter, concreteIndexBoosts, searchPhaseController, executor, searchRequest, listener, shardIterators, startTime,
                     clusterStateVersion, task);
                 break;
             default:
@@ -177,5 +204,4 @@ public class TransportSearchAction extends HandledTransportAction<SearchRequest,
                 + "] to a greater value if you really want to query that many shards at the same time.");
         }
     }
 }


@@ -96,7 +96,7 @@ final class DefaultSearchContext extends SearchContext {
     private final DfsSearchResult dfsResult;
     private final QuerySearchResult queryResult;
     private final FetchSearchResult fetchResult;
-    private float queryBoost = 1.0f;
+    private final float queryBoost;
     private TimeValue timeout;
     // terminate after count
     private int terminateAfter = DEFAULT_TERMINATE_AFTER;
@@ -173,6 +173,7 @@ final class DefaultSearchContext extends SearchContext {
         this.timeout = timeout;
         queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher.getIndexReader(), request::nowInMillis);
         queryShardContext.setTypes(request.types());
+        queryBoost = request.indexBoost();
     }

     @Override
@@ -352,12 +353,6 @@ final class DefaultSearchContext extends SearchContext {
         return queryBoost;
     }

-    @Override
-    public SearchContext queryBoost(float queryBoost) {
-        this.queryBoost = queryBoost;
-        return this;
-    }

     @Override
     public long getOriginNanoTime() {
         return originNanoTime;


@@ -19,7 +19,6 @@
 package org.elasticsearch.search;

-import com.carrotsearch.hppc.ObjectFloatHashMap;
 import org.apache.lucene.search.FieldDoc;
 import org.apache.lucene.search.TopDocs;
 import org.apache.lucene.util.IOUtils;
@@ -679,13 +678,6 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv
         QueryShardContext queryShardContext = context.getQueryShardContext();
         context.from(source.from());
         context.size(source.size());
-        ObjectFloatHashMap<String> indexBoostMap = source.indexBoost();
-        if (indexBoostMap != null) {
-            Float indexBoost = indexBoostMap.get(context.shardTarget().index());
-            if (indexBoost != null) {
-                context.queryBoost(indexBoost);
-            }
-        }
         Map<String, InnerHitBuilder> innerHitBuilders = new HashMap<>();
         if (source.query() != null) {
             InnerHitBuilder.extractInnerHits(source.query(), innerHitBuilders);


@@ -19,16 +19,16 @@
 package org.elasticsearch.search.builder;

-import com.carrotsearch.hppc.ObjectFloatHashMap;
 import org.elasticsearch.action.support.ToXContentToBytes;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.Strings;
-import org.elasticsearch.common.collect.Tuple;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -63,10 +63,6 @@ import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
 import java.util.Objects;
-import java.util.stream.Collectors;
-import java.util.stream.StreamSupport;
-
-import static org.elasticsearch.common.collect.Tuple.tuple;

 /**
  * A search source builder allowing to easily build search source. Simple
@@ -76,6 +72,8 @@ import static org.elasticsearch.common.collect.Tuple.tuple;
  * @see org.elasticsearch.action.search.SearchRequest#source(SearchSourceBuilder)
  */
 public final class SearchSourceBuilder extends ToXContentToBytes implements Writeable {
+    private static final DeprecationLogger DEPRECATION_LOGGER =
+        new DeprecationLogger(Loggers.getLogger(SearchSourceBuilder.class));

     public static final ParseField FROM_FIELD = new ParseField("from");
     public static final ParseField SIZE_FIELD = new ParseField("size");
@@ -167,7 +165,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
     private List<RescoreBuilder> rescoreBuilders;

-    private ObjectFloatHashMap<String> indexBoost = null;
+    private List<IndexBoost> indexBoosts = new ArrayList<>();

     private List<String> stats;
@@ -193,13 +191,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         storedFieldsContext = in.readOptionalWriteable(StoredFieldsContext::new);
         from = in.readVInt();
         highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new);
-        int indexBoostSize = in.readVInt();
-        if (indexBoostSize > 0) {
-            indexBoost = new ObjectFloatHashMap<>(indexBoostSize);
-            for (int i = 0; i < indexBoostSize; i++) {
-                indexBoost.put(in.readString(), in.readFloat());
-            }
-        }
+        indexBoosts = in.readList(IndexBoost::new);
         minScore = in.readOptionalFloat();
         postQueryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
         queryBuilder = in.readOptionalNamedWriteable(QueryBuilder.class);
@@ -240,11 +232,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         out.writeOptionalWriteable(storedFieldsContext);
         out.writeVInt(from);
         out.writeOptionalWriteable(highlightBuilder);
-        int indexBoostSize = indexBoost == null ? 0 : indexBoost.size();
-        out.writeVInt(indexBoostSize);
-        if (indexBoostSize > 0) {
-            writeIndexBoost(out);
-        }
+        out.writeList(indexBoosts);
         out.writeOptionalFloat(minScore);
         out.writeOptionalNamedWriteable(postQueryBuilder);
         out.writeOptionalNamedWriteable(queryBuilder);
@@ -283,17 +271,6 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         out.writeOptionalWriteable(sliceBuilder);
     }

-    private void writeIndexBoost(StreamOutput out) throws IOException {
-        List<Tuple<String, Float>> ibs = StreamSupport
-            .stream(indexBoost.spliterator(), false)
-            .map(i -> tuple(i.key, i.value)).sorted((o1, o2) -> o1.v1().compareTo(o2.v1()))
-            .collect(Collectors.toList());
-        for (Tuple<String, Float> ib : ibs) {
-            out.writeString(ib.v1());
-            out.writeFloat(ib.v2());
-        }
-    }

     /**
      * Sets the search query for this request.
      *
@@ -816,28 +793,26 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
     }

     /**
-     * Sets the boost a specific index will receive when the query is executed
+     * Sets the boost a specific index or alias will receive when the query is executed
      * against it.
      *
      * @param index
-     *            The index to apply the boost against
+     *            The index or alias to apply the boost against
      * @param indexBoost
      *            The boost to apply to the index
      */
     public SearchSourceBuilder indexBoost(String index, float indexBoost) {
-        if (this.indexBoost == null) {
-            this.indexBoost = new ObjectFloatHashMap<>();
-        }
-        this.indexBoost.put(index, indexBoost);
+        Objects.requireNonNull(index, "index must not be null");
+        this.indexBoosts.add(new IndexBoost(index, indexBoost));
         return this;
     }

     /**
-     * Gets the boost a specific indices will receive when the query is
+     * Gets the boost a specific indices or aliases will receive when the query is
      * executed against them.
      */
-    public ObjectFloatHashMap<String> indexBoost() {
-        return indexBoost;
+    public List<IndexBoost> indexBoosts() {
+        return indexBoosts;
     }

     /**
@@ -916,7 +891,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         rewrittenBuilder.storedFieldsContext = storedFieldsContext;
         rewrittenBuilder.from = from;
         rewrittenBuilder.highlightBuilder = highlightBuilder;
-        rewrittenBuilder.indexBoost = indexBoost;
+        rewrittenBuilder.indexBoosts = indexBoosts;
         rewrittenBuilder.minScore = minScore;
         rewrittenBuilder.postQueryBuilder = postQueryBuilder;
         rewrittenBuilder.profile = profile;
@@ -1002,12 +977,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
                     scriptFields.add(new ScriptField(context));
                 }
             } else if (context.getParseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
-                indexBoost = new ObjectFloatHashMap<>();
+                DEPRECATION_LOGGER.deprecated(
+                    "Object format in indices_boost is deprecated, please use array format instead");
                 while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                     if (token == XContentParser.Token.FIELD_NAME) {
                         currentFieldName = parser.currentName();
                     } else if (token.isValue()) {
-                        indexBoost.put(currentFieldName, parser.floatValue());
+                        indexBoosts.add(new IndexBoost(currentFieldName, parser.floatValue()));
                     } else {
                         throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token +
                             " in [" + currentFieldName + "].", parser.getTokenLocation());
@@ -1062,6 +1038,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
                             "] in [" + currentFieldName + "] but found [" + token + "]", parser.getTokenLocation());
                     }
                 }
+            } else if (context.getParseFieldMatcher().match(currentFieldName, INDICES_BOOST_FIELD)) {
+                while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
+                    indexBoosts.add(new IndexBoost(context));
+                }
             } else if (context.getParseFieldMatcher().match(currentFieldName, SORT_FIELD)) {
                 sorts = new ArrayList<>(SortBuilder.fromXContent(context));
             } else if (context.getParseFieldMatcher().match(currentFieldName, RESCORE_FIELD)) {
@@ -1191,18 +1171,13 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
             builder.field(SLICE.getPreferredName(), sliceBuilder);
         }

-        if (indexBoost != null) {
-            builder.startObject(INDICES_BOOST_FIELD.getPreferredName());
-            assert !indexBoost.containsKey(null);
-            final Object[] keys = indexBoost.keys;
-            final float[] values = indexBoost.values;
-            for (int i = 0; i < keys.length; i++) {
-                if (keys[i] != null) {
-                    builder.field((String) keys[i], values[i]);
-                }
-            }
+        builder.startArray(INDICES_BOOST_FIELD.getPreferredName());
+        for (IndexBoost ib : indexBoosts) {
+            builder.startObject();
+            builder.field(ib.index, ib.boost);
             builder.endObject();
         }
+        builder.endArray();

         if (aggregations != null) {
             builder.field(AGGREGATIONS_FIELD.getPreferredName(), aggregations);
@@ -1237,6 +1212,91 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
         }
     }

+    public static class IndexBoost implements Writeable, ToXContent {
+        private final String index;
+        private final float boost;
+
+        IndexBoost(String index, float boost) {
+            this.index = index;
+            this.boost = boost;
+        }
+
+        IndexBoost(StreamInput in) throws IOException {
+            index = in.readString();
+            boost = in.readFloat();
+        }
+
+        IndexBoost(QueryParseContext context) throws IOException {
+            XContentParser parser = context.parser();
+            XContentParser.Token token = parser.currentToken();
+            if (token == XContentParser.Token.START_OBJECT) {
+                token = parser.nextToken();
+                if (token == XContentParser.Token.FIELD_NAME) {
+                    index = parser.currentName();
+                } else {
+                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.FIELD_NAME +
+                        "] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
+                }
+                token = parser.nextToken();
+                if (token == XContentParser.Token.VALUE_NUMBER) {
+                    boost = parser.floatValue();
+                } else {
+                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.VALUE_NUMBER +
+                        "] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
+                }
+                token = parser.nextToken();
+                if (token != XContentParser.Token.END_OBJECT) {
+                    throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.END_OBJECT +
+                        "] in [" + INDICES_BOOST_FIELD + "] but found [" + token + "]", parser.getTokenLocation());
+                }
+            } else {
+                throw new ParsingException(parser.getTokenLocation(), "Expected [" + XContentParser.Token.START_OBJECT +
+                    "] in [" + parser.currentName() + "] but found [" + token + "]", parser.getTokenLocation());
+            }
+        }
+
+        public String getIndex() {
+            return index;
+        }
+
+        public float getBoost() {
+            return boost;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            out.writeString(index);
+            out.writeFloat(boost);
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            builder.field(index, boost);
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(index, boost);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null) {
+                return false;
+            }
+            if (getClass() != obj.getClass()) {
+                return false;
+            }
+            IndexBoost other = (IndexBoost) obj;
+            return Objects.equals(index, other.index)
+                && Objects.equals(boost, other.boost);
+        }
+    }

     public static class ScriptField implements Writeable, ToXContent {
         private final boolean ignoreFailure;
@@ -1352,8 +1412,9 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
     @Override
     public int hashCode() {
         return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder,
-            indexBoost, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder,
-            sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, profile, extBuilders);
+            indexBoosts, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size,
+            sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version,
+            profile, extBuilders);
     }

     @Override
@@ -1372,7 +1433,7 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ
             && Objects.equals(storedFieldsContext, other.storedFieldsContext)
             && Objects.equals(from, other.from)
             && Objects.equals(highlightBuilder, other.highlightBuilder)
-            && Objects.equals(indexBoost, other.indexBoost)
+            && Objects.equals(indexBoosts, other.indexBoosts)
             && Objects.equals(minScore, other.minScore)
             && Objects.equals(postQueryBuilder, other.postQueryBuilder)
             && Objects.equals(queryBuilder, other.queryBuilder)


@@ -37,7 +37,6 @@ import org.elasticsearch.index.query.ParsedQuery;
 import org.elasticsearch.index.query.QueryShardContext;
 import org.elasticsearch.index.shard.IndexShard;
 import org.elasticsearch.index.similarity.SimilarityService;
-import org.elasticsearch.script.ScriptService;
 import org.elasticsearch.search.SearchExtBuilder;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
@@ -144,11 +143,6 @@ public abstract class FilteredSearchContext extends SearchContext {
         return in.queryBoost();
     }

-    @Override
-    public SearchContext queryBoost(float queryBoost) {
-        return in.queryBoost(queryBoost);
-    }

     @Override
     public long getOriginNanoTime() {
         return in.getOriginNanoTime();


@@ -148,8 +148,6 @@ public abstract class SearchContext extends AbstractRefCounted implements Releas
     public abstract float queryBoost();

-    public abstract SearchContext queryBoost(float queryBoost);

     public abstract long getOriginNanoTime();

     public abstract ScrollContext scrollContext();


@@ -19,6 +19,7 @@
 package org.elasticsearch.search.internal;

+import org.elasticsearch.Version;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.common.Strings;
@@ -34,6 +35,7 @@ import org.elasticsearch.search.Scroll;
 import org.elasticsearch.search.builder.SearchSourceBuilder;

 import java.io.IOException;
+import java.util.Optional;

 /**
  * Shard level search request that gets created and consumed on the local node.
@@ -63,6 +65,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
     private Scroll scroll;
     private String[] types = Strings.EMPTY_ARRAY;
     private AliasFilter aliasFilter;
+    private float indexBoost;
     private SearchSourceBuilder source;
     private Boolean requestCache;
     private long nowInMillis;
@@ -73,9 +76,9 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
     }

     ShardSearchLocalRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards,
-                            AliasFilter aliasFilter, long nowInMillis) {
+                            AliasFilter aliasFilter, float indexBoost, long nowInMillis) {
         this(shardId, numberOfShards, searchRequest.searchType(),
-            searchRequest.source(), searchRequest.types(), searchRequest.requestCache(), aliasFilter);
+            searchRequest.source(), searchRequest.types(), searchRequest.requestCache(), aliasFilter, indexBoost);
         this.scroll = searchRequest.scroll();
         this.nowInMillis = nowInMillis;
     }
@@ -85,10 +88,11 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
         this.nowInMillis = nowInMillis;
         this.aliasFilter = aliasFilter;
         this.shardId = shardId;
+        indexBoost = 1.0f;
     }

     public ShardSearchLocalRequest(ShardId shardId, int numberOfShards, SearchType searchType, SearchSourceBuilder source, String[] types,
-                                   Boolean requestCache, AliasFilter aliasFilter) {
+                                   Boolean requestCache, AliasFilter aliasFilter, float indexBoost) {
         this.shardId = shardId;
         this.numberOfShards = numberOfShards;
         this.searchType = searchType;
@@ -96,6 +100,7 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
         this.types = types;
         this.requestCache = requestCache;
         this.aliasFilter = aliasFilter;
+        this.indexBoost = indexBoost;
     }
@@ -134,6 +139,11 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
         return aliasFilter.getQueryBuilder();
     }

+    @Override
+    public float indexBoost() {
+        return indexBoost;
+    }

     @Override
     public long nowInMillis() {
         return nowInMillis;
@@ -167,6 +177,20 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
         source = in.readOptionalWriteable(SearchSourceBuilder::new);
         types = in.readStringArray();
         aliasFilter = new AliasFilter(in);
+        if (in.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+            indexBoost = in.readFloat();
+        } else {
+            // Nodes < 5.2.0 doesn't send index boost. Read it from source.
+            if (source != null) {
+                Optional<SearchSourceBuilder.IndexBoost> boost = source.indexBoosts()
+                    .stream()
+                    .filter(ib -> ib.getIndex().equals(shardId.getIndexName()))
+                    .findFirst();
+                indexBoost = boost.isPresent() ? boost.get().getBoost() : 1.0f;
+            } else {
+                indexBoost = 1.0f;
+            }
+        }
         nowInMillis = in.readVLong();
         requestCache = in.readOptionalBoolean();
     }
@@ -181,6 +205,9 @@ public class ShardSearchLocalRequest implements ShardSearchRequest {
         out.writeOptionalWriteable(source);
         out.writeStringArray(types);
         aliasFilter.writeTo(out);
+        if (out.getVersion().onOrAfter(Version.V_5_2_0_UNRELEASED)) {
+            out.writeFloat(indexBoost);
+        }
         if (!asKey) {
             out.writeVLong(nowInMillis);
         }


@@ -62,6 +62,8 @@ public interface ShardSearchRequest {
     QueryBuilder filteringAliases();

+    float indexBoost();

     long nowInMillis();

     Boolean requestCache();


@@ -54,8 +54,8 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
     }

     public ShardSearchTransportRequest(SearchRequest searchRequest, ShardId shardId, int numberOfShards,
-                                       AliasFilter aliasFilter, long nowInMillis) {
-        this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, nowInMillis);
+                                       AliasFilter aliasFilter, float indexBoost, long nowInMillis) {
+        this.shardSearchLocalRequest = new ShardSearchLocalRequest(searchRequest, shardId, numberOfShards, aliasFilter, indexBoost, nowInMillis);
         this.originalIndices = new OriginalIndices(searchRequest);
     }
@@ -111,6 +111,11 @@ public class ShardSearchTransportRequest extends TransportRequest implements Sha
         return shardSearchLocalRequest.filteringAliases();
     }

+    @Override
+    public float indexBoost() {
+        return shardSearchLocalRequest.indexBoost();
+    }

     @Override
     public long nowInMillis() {
         return shardSearchLocalRequest.nowInMillis();


@@ -22,14 +22,13 @@ import org.apache.lucene.search.Query;
 import org.apache.lucene.util.Counter;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.index.query.ParsedQuery;
-import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.search.aggregations.SearchContextAggregations;
 import org.elasticsearch.search.fetch.FetchSearchResult;
+import org.elasticsearch.search.fetch.StoredFieldsContext;
 import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext;
 import org.elasticsearch.search.fetch.subphase.FetchSourceContext;
 import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext;
 import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight;
-import org.elasticsearch.search.lookup.SearchLookup;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.rescore.RescoreSearchContext;
 import org.elasticsearch.search.sort.SortAndFormats;
@@ -85,11 +84,6 @@ public class SubSearchContext extends FilteredSearchContext {
         throw new UnsupportedOperationException("this context should be read only");
     }

-    @Override
-    public SearchContext queryBoost(float queryBoost) {
-        throw new UnsupportedOperationException("Not supported");
-    }

     @Override
     public SearchContext scrollContext(ScrollContext scrollContext) {
         throw new UnsupportedOperationException("Not supported");


@@ -88,7 +88,7 @@ public class SearchAsyncActionTests extends ESTestCase {
         lookup.put(primaryNode.getId(), primaryNode);
         Map<String, AliasFilter> aliasFilters = Collections.singletonMap("_na_", new AliasFilter(null, Strings.EMPTY_ARRAY));
         AbstractSearchAsyncAction asyncAction = new AbstractSearchAsyncAction<TestSearchPhaseResult>(logger, transportService, lookup::get,
-            aliasFilters, null, request, responseListener, shardsIter, 0, 0, null) {
+            aliasFilters, Collections.emptyMap(), null, request, responseListener, shardsIter, 0, 0, null) {
             TestSearchResponse response = new TestSearchResponse();

             @Override


@@ -87,6 +87,11 @@ public class SearchSlowLogTests extends ESSingleNodeTestCase {
                 return null;
             }

+            @Override
+            public float indexBoost() {
+                return 1.0f;
+            }

             @Override
             public long nowInMillis() {
                 return 0;


@@ -185,7 +185,7 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
             try {
                 QuerySearchResultProvider querySearchResultProvider = service.executeQueryPhase(
                     new ShardSearchLocalRequest(indexShard.shardId(), 1, SearchType.DEFAULT,
-                        new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY)),
+                        new SearchSourceBuilder(), new String[0], false, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f),
                     new SearchTask(123L, "", "", "", null));
                 IntArrayList intCursors = new IntArrayList(1);
                 intCursors.add(0);
@@ -220,7 +220,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
                 new SearchSourceBuilder(),
                 new String[0],
                 false,
-                new AliasFilter(null, Strings.EMPTY_ARRAY)),
+                new AliasFilter(null, Strings.EMPTY_ARRAY),
+                1.0f),
             null);
         // the search context should inherit the default timeout
         assertThat(contextWithDefaultTimeout.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
@@ -234,7 +235,8 @@ public class SearchServiceTests extends ESSingleNodeTestCase {
                 new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds)),
                 new String[0],
                 false,
-                new AliasFilter(null, Strings.EMPTY_ARRAY)),
+                new AliasFilter(null, Strings.EMPTY_ARRAY),
+                1.0f),
             null);
         // the search context should inherit the query timeout
         assertThat(context.timeout(), equalTo(TimeValue.timeValueSeconds(seconds)));


@@ -301,4 +301,78 @@ public class SearchSourceBuilderTests extends AbstractSearchTestCase {
         String query = "{ \"query\": {} }";
         assertParseSearchSource(builder, new BytesArray(query), ParseFieldMatcher.EMPTY);
     }

+    public void testParseIndicesBoost() throws IOException {
+        {
+            String restContent = " { \"indices_boost\": {\"foo\": 1.0, \"bar\": 2.0}}";
+            try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
+                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
+                    searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
+                assertEquals(2, searchSourceBuilder.indexBoosts().size());
+                assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
+                assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
+            }
+        }
+        {
+            String restContent = "{" +
+                "    \"indices_boost\" : [\n" +
+                "        { \"foo\" : 1.0 },\n" +
+                "        { \"bar\" : 2.0 },\n" +
+                "        { \"baz\" : 3.0 }\n" +
+                "    ]}";
+            try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
+                SearchSourceBuilder searchSourceBuilder = SearchSourceBuilder.fromXContent(createParseContext(parser),
+                    searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers);
+                assertEquals(3, searchSourceBuilder.indexBoosts().size());
+                assertEquals(new SearchSourceBuilder.IndexBoost("foo", 1.0f), searchSourceBuilder.indexBoosts().get(0));
+                assertEquals(new SearchSourceBuilder.IndexBoost("bar", 2.0f), searchSourceBuilder.indexBoosts().get(1));
+                assertEquals(new SearchSourceBuilder.IndexBoost("baz", 3.0f), searchSourceBuilder.indexBoosts().get(2));
+            }
+        }
+        {
+            String restContent = "{" +
+                "    \"indices_boost\" : [\n" +
+                "        { \"foo\" : 1.0, \"bar\": 2.0}\n" + // invalid format
+                "    ]}";
+            assertIndicesBoostParseErrorMessage(restContent, "Expected [END_OBJECT] in [indices_boost] but found [FIELD_NAME]");
+        }
+        {
+            String restContent = "{" +
+                "    \"indices_boost\" : [\n" +
+                "        {}\n" + // invalid format
+                "    ]}";
+            assertIndicesBoostParseErrorMessage(restContent, "Expected [FIELD_NAME] in [indices_boost] but found [END_OBJECT]");
+        }
+        {
+            String restContent = "{" +
+                "    \"indices_boost\" : [\n" +
+                "        { \"foo\" : \"bar\"}\n" + // invalid format
+                "    ]}";
+            assertIndicesBoostParseErrorMessage(restContent, "Expected [VALUE_NUMBER] in [indices_boost] but found [VALUE_STRING]");
+        }
+        {
+            String restContent = "{" +
+                "    \"indices_boost\" : [\n" +
+                "        { \"foo\" : {\"bar\": 1}}\n" + // invalid format
+                "    ]}";
+            assertIndicesBoostParseErrorMessage(restContent, "Expected [VALUE_NUMBER] in [indices_boost] but found [START_OBJECT]");
+        }
+    }
+
+    private void assertIndicesBoostParseErrorMessage(String restContent, String expectedErrorMessage) throws IOException {
+        try (XContentParser parser = XContentFactory.xContent(restContent).createParser(restContent)) {
+            ParsingException e = expectThrows(ParsingException.class, () -> SearchSourceBuilder.fromXContent(createParseContext(parser),
+                searchRequestParsers.aggParsers, searchRequestParsers.suggesters, searchRequestParsers.searchExtParsers));
+            assertEquals(expectedErrorMessage, e.getMessage());
+        }
+    }
 }


@@ -1,107 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.indicesboost;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.test.ESIntegTestCase;
import static org.elasticsearch.client.Requests.indexRequest;
import static org.elasticsearch.client.Requests.searchRequest;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.search.builder.SearchSourceBuilder.searchSource;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.hamcrest.Matchers.equalTo;
public class SimpleIndicesBoostSearchIT extends ESIntegTestCase {
public void testIndicesBoost() throws Exception {
assertHitCount(client().prepareSearch().setQuery(termQuery("test", "value")).get(), 0);
try {
client().prepareSearch("test").setQuery(termQuery("test", "value")).execute().actionGet();
fail("should fail");
} catch (Exception e) {
// ignore, no indices
}
createIndex("test1", "test2");
ensureGreen();
client().index(indexRequest("test1").type("type1").id("1")
.source(jsonBuilder().startObject().field("test", "value check").endObject())).actionGet();
client().index(indexRequest("test2").type("type1").id("1")
.source(jsonBuilder().startObject().field("test", "value beck").endObject())).actionGet();
refresh();
float indexBoost = 1.1f;
logger.info("--- QUERY_THEN_FETCH");
logger.info("Query with test1 boosted");
SearchResponse response = client().search(searchRequest()
.searchType(SearchType.QUERY_THEN_FETCH)
.source(searchSource().explain(true).indexBoost("test1", indexBoost).query(termQuery("test", "value")))
).actionGet();
assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test1"));
assertThat(response.getHits().getAt(1).index(), equalTo("test2"));
logger.info("Query with test2 boosted");
response = client().search(searchRequest()
.searchType(SearchType.QUERY_THEN_FETCH)
.source(searchSource().explain(true).indexBoost("test2", indexBoost).query(termQuery("test", "value")))
).actionGet();
assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test2"));
assertThat(response.getHits().getAt(1).index(), equalTo("test1"));
logger.info("--- DFS_QUERY_THEN_FETCH");
logger.info("Query with test1 boosted");
response = client().search(searchRequest()
.searchType(SearchType.DFS_QUERY_THEN_FETCH)
.source(searchSource().explain(true).indexBoost("test1", indexBoost).query(termQuery("test", "value")))
).actionGet();
assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test1"));
assertThat(response.getHits().getAt(1).index(), equalTo("test2"));
logger.info("Query with test2 boosted");
response = client().search(searchRequest()
.searchType(SearchType.DFS_QUERY_THEN_FETCH)
.source(searchSource().explain(true).indexBoost("test2", indexBoost).query(termQuery("test", "value")))
).actionGet();
assertThat(response.getHits().totalHits(), equalTo(2L));
logger.info("Hit[0] {} Explanation {}", response.getHits().getAt(0).index(), response.getHits().getAt(0).explanation());
logger.info("Hit[1] {} Explanation {}", response.getHits().getAt(1).index(), response.getHits().getAt(1).explanation());
assertThat(response.getHits().getAt(0).index(), equalTo("test2"));
assertThat(response.getHits().getAt(1).index(), equalTo("test1"));
}
}

View File

@ -81,6 +81,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
                assertEquals(deserializedRequest.cacheKey(), shardSearchTransportRequest.cacheKey());
                assertNotSame(deserializedRequest, shardSearchTransportRequest);
                assertEquals(deserializedRequest.filteringAliases(), shardSearchTransportRequest.filteringAliases());
                assertEquals(deserializedRequest.indexBoost(), shardSearchTransportRequest.indexBoost(), 0.0f);
            }
        }
    }
@ -96,7 +97,7 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
            filteringAliases = new AliasFilter(null, Strings.EMPTY_ARRAY);
        }
        return new ShardSearchTransportRequest(searchRequest, shardId,
            randomIntBetween(1, 100), filteringAliases, Math.abs(randomLong()));
            randomIntBetween(1, 100), filteringAliases, randomBoolean() ? 1.0f : randomFloat(), Math.abs(randomLong()));
    }

    public void testFilteringAliases() throws Exception {
@ -213,4 +214,24 @@ public class ShardSearchTransportRequestTests extends AbstractSearchTestCase {
        }
    }
// BWC test for changes from #21393
public void testSerialize50RequestForIndexBoost() throws IOException {
BytesArray requestBytes = new BytesArray(Base64.getDecoder()
// this is a base64 encoded request generated with the same input
.decode("AAZpbmRleDEWTjEyM2trbHFUT21XZDY1Z2VDYlo5ZwABBAABAAIA/wD/////DwABBmluZGV4MUAAAAAAAAAAAP////8PAAAAAAAAAgAAAA" +
"AAAPa/q8mOKwIAJg=="));
try (StreamInput in = new NamedWriteableAwareStreamInput(requestBytes.streamInput(), namedWriteableRegistry)) {
in.setVersion(Version.V_5_0_0);
ShardSearchTransportRequest readRequest = new ShardSearchTransportRequest();
readRequest.readFrom(in);
assertEquals(0, in.available());
assertEquals(2.0f, readRequest.indexBoost(), 0);
BytesStreamOutput output = new BytesStreamOutput();
output.setVersion(Version.V_5_0_0);
readRequest.writeTo(output);
assertEquals(output.bytes().toBytesRef(), requestBytes.toBytesRef());
}
}
}

View File

@ -368,3 +368,18 @@ buildRestTests.setups['range_index'] = '''
        body: |
          {"index":{"_id": 1}}
          {"expected_attendees": {"gte": 10, "lte": 20}, "time_frame": {"gte": "2015-10-31 12:00:00", "lte": "2015-11-01"}}'''
// Used by index boost doc
buildRestTests.setups['index_boost'] = '''
- do:
indices.create:
index: index1
- do:
indices.create:
index: index2
- do:
indices.put_alias:
index: index1
name: alias1
'''

View File

@ -6,6 +6,7 @@ across more than one index. This is very handy when hits coming from
one index matter more than hits coming from another index (think social
graph where each user has an index).
deprecated[5.2.0, This format is deprecated. Please use array format instead.]
[source,js]
--------------------------------------------------
GET /_search
@ -17,3 +18,23 @@ GET /_search
}
--------------------------------------------------
// CONSOLE
// TEST[setup:index_boost warning:Object format in indices_boost is deprecated, please use array format instead]
You can also specify `indices_boost` as an array to control the order in which the boosts are applied.
[source,js]
--------------------------------------------------
GET /_search
{
"indices_boost" : [
{ "alias1" : 1.4 },
{ "index*" : 1.3 }
]
}
--------------------------------------------------
// CONSOLE
// TEST[continued]
This is important when you use aliases or wildcard expressions:
if an index matches multiple entries, the first matching entry is used.
For example, if an index is included in both `alias1` and `index*`, the boost value of `1.4` is applied because `alias1` appears first.
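For illustration only, here is a minimal sketch of this first-match-wins resolution. It is not the actual Elasticsearch implementation: the `resolveBoosts` method and the `expansions` map (standing in for alias/wildcard expansion against cluster metadata) are hypothetical.

[source,java]
--------------------------------------------------
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch: walk the indices_boost entries in request order,
// expand each alias/wildcard to concrete indices, and keep only the first
// boost seen for each concrete index.
class IndexBoostResolution {

    static Map<String, Float> resolveBoosts(List<Map.Entry<String, Float>> indicesBoost,
                                            Map<String, List<String>> expansions) {
        Map<String, Float> concreteBoosts = new LinkedHashMap<>();
        for (Map.Entry<String, Float> entry : indicesBoost) {                // request order is preserved
            for (String concreteIndex : expansions.get(entry.getKey())) {    // alias/wildcard -> concrete indices
                concreteBoosts.putIfAbsent(concreteIndex, entry.getValue()); // first match wins
            }
        }
        return concreteBoosts;
    }
}
--------------------------------------------------

With `alias1` expanding to `index1` and `index*` expanding to `index1` and `index2`, this sketch yields `index1 -> 1.4` and `index2 -> 1.3`.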

View File

@ -0,0 +1,196 @@
setup:
- do:
indices.create:
index: test_1
- do:
indices.create:
index: test_2
- do:
indices.put_alias:
index: test_1
name: alias_1
- do:
indices.put_alias:
index: test_2
name: alias_2
- do:
index:
index: test_1
type: test
id: 1
body: { foo: bar }
- do:
index:
index: test_2
type: test
id: 1
body: { foo: bar }
- do:
indices.refresh:
index: [test_1, test_2]
---
"Indices boost using object":
- skip:
version: " - 5.1.99"
reason: deprecation was added in 5.2.0
features: "warnings"
- do:
warnings:
- 'Object format in indices_boost is deprecated, please use array format instead'
search:
index: _all
body:
indices_boost: {test_1: 2.0, test_2: 1.0}
- match: { hits.total: 2 }
- match: { hits.hits.0._index: test_1 }
- match: { hits.hits.1._index: test_2 }
- do:
warnings:
- 'Object format in indices_boost is deprecated, please use array format instead'
search:
index: _all
body:
indices_boost: {test_1: 1.0, test_2: 2.0}
- match: { hits.total: 2 }
- match: { hits.hits.0._index: test_2 }
- match: { hits.hits.1._index: test_1 }
---
"Indices boost using array":
- skip:
version: " - 5.1.99"
reason: array format was added in 5.2.0
- do:
search:
index: _all
body:
indices_boost: [{test_1: 2.0}, {test_2: 1.0}]
- match: { hits.total: 2 }
- match: { hits.hits.0._index: test_1 }
- match: { hits.hits.1._index: test_2 }
- do:
search:
index: _all
body:
indices_boost: [{test_1: 1.0}, {test_2: 2.0}]
- match: { hits.total: 2 }
- match: { hits.hits.0._index: test_2 }
- match: { hits.hits.1._index: test_1 }
---
"Indices boost using array with alias":
- skip:
version: " - 5.1.99"
reason: array format was added in 5.2.0
- do:
search:
index: _all
body:
indices_boost: [{alias_1: 2.0}]
- match: { hits.total: 2}
- match: { hits.hits.0._index: test_1 }
- match: { hits.hits.1._index: test_2 }
- do:
search:
index: _all
body:
indices_boost: [{alias_2: 2.0}]
- match: { hits.total: 2}
- match: { hits.hits.0._index: test_2 }
- match: { hits.hits.1._index: test_1 }
---
"Indices boost using array with wildcard":
- skip:
version: " - 5.1.99"
reason: array format was added in 5.2.0
- do:
search:
index: _all
body:
indices_boost: [{"*_1": 2.0}]
- match: { hits.total: 2}
- match: { hits.hits.0._index: test_1 }
- match: { hits.hits.1._index: test_2 }
- do:
search:
index: _all
body:
indices_boost: [{"*_2": 2.0}]
- match: { hits.total: 2}
- match: { hits.hits.0._index: test_2 }
- match: { hits.hits.1._index: test_1 }
---
"Indices boost using array multiple match":
- skip:
version: " - 5.1.99"
reason: array format was added in 5.2.0
- do:
search:
index: _all
body:
# First match (3.0) is used for test_1
indices_boost: [{"*_1": 3.0}, {alias_1: 1.0}, {test_2: 2.0}]
- match: { hits.total: 2}
- match: { hits.hits.0._index: test_1 }
- match: { hits.hits.1._index: test_2 }
- do:
search:
index: _all
body:
# First match (1.0) is used for test_1
indices_boost: [{"*_1": 1.0}, {test_2: 2.0}, {alias_1: 3.0}]
- match: { hits.total: 2}
- match: { hits.hits.0._index: test_2 }
- match: { hits.hits.1._index: test_1 }
---
"Indices boost for nonexistent index/alias":
- skip:
version: " - 5.1.99"
reason: array format was added in 5.2.0
- do:
catch: /no such index/
search:
index: _all
body:
indices_boost: [{nonexistent: 2.0}, {test_1: 1.0}, {test_2: 2.0}]
- do:
search:
index: _all
ignore_unavailable: true
body:
indices_boost: [{nonexistent: 2.0}, {test_1: 1.0}, {test_2: 2.0}]
- match: { hits.total: 2}
- match: { hits.hits.0._index: test_2 }
- match: { hits.hits.1._index: test_1 }

View File

@ -156,11 +156,6 @@ public class TestSearchContext extends SearchContext {
        return 0;
    }
@Override
public SearchContext queryBoost(float queryBoost) {
return null;
}
    @Override
    public long getOriginNanoTime() {
        return originNanoTime;