MoreLikeThis API: search for documents that are "like" the specified document. Closes #45.
Parent: c30d790609
Commit: 8b36281d60
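
For context, a minimal usage sketch of the new API through the Java client, based on the moreLikeThisRequest(...) builder and the client.moreLikeThis(...) call introduced in this commit; the index "twitter", type "tweet", id "1" and the client variable are illustrative assumptions, not part of the commit:

    import static org.elasticsearch.client.Requests.*;

    // Build a more-like-this request for an existing document (names are examples only).
    MoreLikeThisRequest mltRequest = moreLikeThisRequest("twitter").type("tweet").id("1")
            .fields("message");

    // The result of a more-like-this call is a regular SearchResponse.
    client.moreLikeThis(mltRequest, new ActionListener<SearchResponse>() {
        @Override public void onResponse(SearchResponse response) {
            // process the hits that are "like" the given document
        }

        @Override public void onFailure(Throwable e) {
            // handle the failure
        }
    });
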
@@ -22,12 +22,18 @@ package org.elasticsearch.action.mlt;

import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.Actions;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.util.Bytes;
import org.elasticsearch.util.Strings;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import static org.elasticsearch.search.Scroll.*;

/**
* @author kimchy (shay.banon)
*/

@@ -52,6 +58,14 @@ public class MoreLikeThisRequest implements ActionRequest {

private Boolean boostTerms = null;
private float boostTermsFactor = -1;

private SearchType searchType = SearchType.DEFAULT;
private String searchQueryHint;
private String[] searchIndices;
private String[] searchTypes;
private Scroll searchScroll;
private byte[] searchSource;


private boolean threadedListener = false;

public MoreLikeThisRequest() {

@@ -87,6 +101,11 @@ public class MoreLikeThisRequest implements ActionRequest {

return this.fields;
}

public MoreLikeThisRequest fields(String... fields) {
this.fields = fields;
return this;
}

public MoreLikeThisRequest percentTermsToMatch(float percentTermsToMatch) {
this.percentTermsToMatch = percentTermsToMatch;
return this;

@@ -159,7 +178,7 @@ public class MoreLikeThisRequest implements ActionRequest {

return this.maxWordLen;
}

public MoreLikeThisRequest boostTerms(boolean boostTerms) {
public MoreLikeThisRequest boostTerms(Boolean boostTerms) {
this.boostTerms = boostTerms;
return this;
}

@@ -177,6 +196,75 @@ public class MoreLikeThisRequest implements ActionRequest {

return this.boostTermsFactor;
}

public MoreLikeThisRequest searchSource(SearchSourceBuilder sourceBuilder) {
return searchSource(sourceBuilder.build());
}

public MoreLikeThisRequest searchSource(byte[] searchSource) {
this.searchSource = searchSource;
return this;
}

public byte[] searchSource() {
return this.searchSource;
}

/**
* Sets the search type of the mlt search query.
*/
public MoreLikeThisRequest searchType(SearchType searchType) {
this.searchType = searchType;
return this;
}

public SearchType searchType() {
return this.searchType;
}

/**
* Sets the indices the resulting mlt query will run against. If not set, will run
* against the index the document was fetched from.
*/
public MoreLikeThisRequest searchIndices(String... searchIndices) {
this.searchIndices = searchIndices;
return this;
}

public String[] searchIndices() {
return this.searchIndices;
}

/**
* Sets the types the resulting mlt query will run against. If not set, will run
* against the type of the document fetched.
*/
public MoreLikeThisRequest searchTypes(String... searchTypes) {
this.searchTypes = searchTypes;
return this;
}

public String[] searchTypes() {
return this.searchTypes;
}

public MoreLikeThisRequest searchQueryHint(String searchQueryHint) {
this.searchQueryHint = searchQueryHint;
return this;
}

public String searchQueryHint() {
return this.searchQueryHint;
}

public MoreLikeThisRequest searchScroll(Scroll searchScroll) {
this.searchScroll = searchScroll;
return this;
}

public Scroll searchScroll() {
return this.searchScroll;
}

@Override public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
if (index == null) {

@@ -233,6 +321,42 @@ public class MoreLikeThisRequest implements ActionRequest {

boostTerms = in.readBoolean();
}
boostTermsFactor = in.readFloat();
searchType = SearchType.fromId(in.readByte());
if (in.readBoolean()) {
searchQueryHint = in.readUTF();
}
size = in.readInt();
if (size == -1) {
searchIndices = null;
} else if (size == 0) {
searchIndices = Strings.EMPTY_ARRAY;
} else {
searchIndices = new String[size];
for (int i = 0; i < size; i++) {
searchIndices[i] = in.readUTF();
}
}
size = in.readInt();
if (size == -1) {
searchTypes = null;
} else if (size == 0) {
searchTypes = Strings.EMPTY_ARRAY;
} else {
searchTypes = new String[size];
for (int i = 0; i < size; i++) {
searchTypes[i] = in.readUTF();
}
}
if (in.readBoolean()) {
searchScroll = readScroll(in);
}
size = in.readInt();
if (size == 0) {
searchSource = Bytes.EMPTY_ARRAY;
} else {
searchSource = new byte[in.readInt()];
in.readFully(searchSource);
}
}

@Override public void writeTo(DataOutput out) throws IOException {

@@ -270,5 +394,41 @@ public class MoreLikeThisRequest implements ActionRequest {

out.writeBoolean(boostTerms);
}
out.writeFloat(boostTermsFactor);

out.writeByte(searchType.id());
if (searchQueryHint == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
out.writeUTF(searchQueryHint);
}
if (searchIndices == null) {
out.writeInt(-1);
} else {
out.writeInt(searchIndices.length);
for (String index : searchIndices) {
out.writeUTF(index);
}
}
if (searchTypes == null) {
out.writeInt(-1);
} else {
out.writeInt(searchTypes.length);
for (String type : searchTypes) {
out.writeUTF(type);
}
}
if (searchScroll == null) {
out.writeBoolean(false);
} else {
out.writeBoolean(true);
searchScroll.writeTo(out);
}
if (searchSource == null) {
out.writeInt(0);
} else {
out.writeInt(searchSource.length);
out.write(searchSource);
}
}
}
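
The readFrom/writeTo pair above serializes the optional string arrays with a small length-prefix convention: -1 marks a null array, 0 an empty one, and any other value is the element count followed by the elements. A standalone sketch of that convention, with helper names chosen here purely for illustration:

    // Hypothetical helpers mirroring the -1 / 0 / length encoding used by MoreLikeThisRequest.
    static void writeStringArray(DataOutput out, String[] values) throws IOException {
        if (values == null) {
            out.writeInt(-1);                // null array
        } else {
            out.writeInt(values.length);     // 0 means empty, > 0 means elements follow
            for (String value : values) {
                out.writeUTF(value);
            }
        }
    }

    static String[] readStringArray(DataInput in) throws IOException {
        int size = in.readInt();
        if (size == -1) {
            return null;                     // was written as null
        }
        String[] values = new String[size];
        for (int i = 0; i < size; i++) {
            values[i] = in.readUTF();
        }
        return values;
    }
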
@@ -121,11 +121,24 @@ public class TransportMoreLikeThisAction extends BaseAction<MoreLikeThisRequest,

listener.onFailure(e);
}

SearchRequest searchRequest = searchRequest(request.index()).types(request.type())
.listenerThreaded(request.listenerThreaded())
.source(searchSource()
String[] searchIndices = request.searchIndices();
if (searchIndices == null) {
searchIndices = new String[]{request.index()};
}
String[] searchTypes = request.searchTypes();
if (searchTypes == null) {
searchTypes = new String[]{request.type()};
}

SearchRequest searchRequest = searchRequest(searchIndices)
.types(searchTypes)
.searchType(request.searchType())
.source(request.searchSource())
.scroll(request.searchScroll())
.extraSource(searchSource()
.query(boolBuilder)
);
)
.listenerThreaded(request.listenerThreaded());
searchAction.execute(searchRequest, new ActionListener<SearchResponse>() {
@Override public void onResponse(SearchResponse response) {
listener.onResponse(response);
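
The rewritten transport action falls back to the source document's index and type when searchIndices/searchTypes are not set, and passes the generated more-like-this bool query as extraSource so that a caller-provided searchSource stays in place as the main source. A sketch of steering the resulting search from the caller side, assuming the listener from the earlier sketch and illustrative index/type names:

    // Run the "like this" search against a different index and type than the source
    // document, with an explicit search type (values are examples only).
    client.moreLikeThis(
            moreLikeThisRequest("twitter").type("tweet").id("1")
                    .fields("message")
                    .searchIndices("twitter-archive")
                    .searchTypes("tweet")
                    .searchType(SearchType.DFS_QUERY_THEN_FETCH),
            listener);
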
@@ -23,7 +23,7 @@ import org.elasticsearch.action.ActionRequest;

import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.util.Required;
import org.elasticsearch.util.Bytes;
import org.elasticsearch.util.Strings;
import org.elasticsearch.util.TimeValue;

@@ -36,11 +36,11 @@ import static org.elasticsearch.search.Scroll.*;

import static org.elasticsearch.util.TimeValue.*;

/**
* @author kimchy (Shay Banon)
* @author kimchy (shay.banon)
*/
public class SearchRequest implements ActionRequest {

private SearchType searchType = SearchType.QUERY_THEN_FETCH;
private SearchType searchType = SearchType.DEFAULT;

private String[] indices;

@@ -48,6 +48,8 @@ public class SearchRequest implements ActionRequest {

private byte[] source;

private byte[] extraSource;

private Scroll scroll;

private int from = -1;

@@ -116,11 +118,29 @@ public class SearchRequest implements ActionRequest {

return this;
}

@Required public SearchRequest source(SearchSourceBuilder sourceBuilder) {
/**
* The source of the search request.
*/
public SearchRequest source(SearchSourceBuilder sourceBuilder) {
return source(sourceBuilder.build());
}

@Required public SearchRequest source(byte[] source) {
public SearchRequest source(byte[] source) {
this.source = source;
return this;
}

/**
* Allows to provide an additional source that will be used as well.
*/
public SearchRequest extraSource(SearchSourceBuilder sourceBuilder) {
return extraSource(sourceBuilder.build());
}

/**
* Allows to provide an additional source that will be used as well.
*/
public SearchRequest extraSource(byte[] source) {
this.source = source;
return this;
}

@@ -146,6 +166,10 @@ public class SearchRequest implements ActionRequest {

return source;
}

public byte[] extraSource() {
return this.extraSource;
}

public Scroll scroll() {
return scroll;
}

@@ -212,8 +236,20 @@ public class SearchRequest implements ActionRequest {

if (in.readBoolean()) {
timeout = readTimeValue(in);
}
source = new byte[in.readInt()];
in.readFully(source);
int size = in.readInt();
if (size == 0) {
source = Bytes.EMPTY_ARRAY;
} else {
source = new byte[size];
in.readFully(source);
}
size = in.readInt();
if (size == 0) {
extraSource = Bytes.EMPTY_ARRAY;
} else {
extraSource = new byte[size];
in.readFully(extraSource);
}

int typesSize = in.readInt();
if (typesSize > 0) {

@@ -254,8 +290,18 @@ public class SearchRequest implements ActionRequest {

out.writeBoolean(true);
timeout.writeTo(out);
}
out.writeInt(source.length);
out.write(source);
if (source == null) {
out.writeInt(0);
} else {
out.writeInt(source.length);
out.write(source);
}
if (extraSource == null) {
out.writeInt(0);
} else {
out.writeInt(extraSource.length);
out.write(extraSource);
}
out.writeInt(types.length);
for (String type : types) {
out.writeUTF(type);
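
extraSource is a second, optional search source carried alongside the main one; both are written with a zero length when absent and parsed on the shard when present. A brief sketch of supplying both on a plain search request, assuming the searchRequest(...) factory used by the transport action above and illustrative JSON sources:

    // Illustrative JSON sources; the field names follow the search source format.
    byte[] querySource = "{\"query\" : {\"term\" : {\"user\" : \"kimchy\"}}}".getBytes();
    byte[] extraQuerySource = "{\"explain\" : true}".getBytes();

    SearchRequest request = searchRequest("twitter")   // illustrative index name
            .source(querySource)                       // main search source
            .extraSource(extraQuerySource);            // additional source, parsed as well
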
@@ -30,6 +30,8 @@ public enum SearchType {

DFS_QUERY_AND_FETCH((byte) 2),
QUERY_AND_FETCH((byte) 3);

public static final SearchType DEFAULT = QUERY_THEN_FETCH;

private byte id;

SearchType(byte id) {
@@ -61,6 +61,7 @@ public abstract class TransportSearchHelper {

public static InternalSearchRequest internalSearchRequest(ShardRouting shardRouting, SearchRequest request) {
InternalSearchRequest internalRequest = new InternalSearchRequest(shardRouting, request.source());
internalRequest.extraSource(request.extraSource());
internalRequest.from(request.from()).size(request.size());
internalRequest.scroll(request.scroll());
internalRequest.timeout(request.timeout());
@@ -34,6 +34,7 @@ import java.io.InputStream;

import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;

import static org.elasticsearch.util.SizeValue.*;
import static org.elasticsearch.util.TimeValue.*;

@@ -43,6 +44,8 @@ import static org.elasticsearch.util.TimeValue.*;

*/
public class NettyHttpRequest implements HttpRequest {

private final Pattern commaPattern = Pattern.compile(",");

private final org.jboss.netty.handler.codec.http.HttpRequest request;

private QueryStringDecoder queryStringDecoder;

@@ -164,6 +167,14 @@ public class NettyHttpRequest implements HttpRequest {

return parseSizeValue(param(key), defaultValue);
}

@Override public String[] paramAsStringArray(String key, String[] defaultValue) {
String value = param(key);
if (value == null) {
return defaultValue;
}
return commaPattern.split(value);
}

@Override public boolean hasParam(String key) {
return queryStringDecoder.getParameters().containsKey(key);
}
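
paramAsStringArray splits a comma-separated request parameter on the pre-compiled comma pattern and falls back to the supplied default when the parameter is absent. An illustrative use, assuming a request that carries ?fields=message,user:

    // For a URL such as /twitter/tweet/1/_moreLikeThis?fields=message,user (values are examples):
    String[] fields = request.paramAsStringArray("fields", null);        // -> {"message", "user"}
    String[] stopWords = request.paramAsStringArray("stopWords", null);  // -> null when the parameter is missing
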
@@ -61,6 +61,8 @@ public interface RestRequest extends ToJson.Params {

String param(String key);

String[] paramAsStringArray(String key, String[] defaultValue);

float paramAsFloat(String key, float defaultValue);

int paramAsInt(String key, int defaultValue);
@@ -40,6 +40,7 @@ import org.elasticsearch.rest.action.deletebyquery.RestDeleteByQueryAction;

import org.elasticsearch.rest.action.get.RestGetAction;
import org.elasticsearch.rest.action.index.RestIndexAction;
import org.elasticsearch.rest.action.main.RestMainAction;
import org.elasticsearch.rest.action.mlt.RestMoreLikeThisAction;
import org.elasticsearch.rest.action.search.RestSearchAction;
import org.elasticsearch.rest.action.terms.RestTermsAction;

@@ -83,5 +84,7 @@ public class RestActionModule extends AbstractModule {

bind(RestTermsAction.class).asEagerSingleton();

bind(RestSearchAction.class).asEagerSingleton();

bind(RestMoreLikeThisAction.class).asEagerSingleton();
}
}
@@ -0,0 +1,110 @@

/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.rest.action.mlt;

import com.google.inject.Inject;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.mlt.MoreLikeThisRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.rest.*;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.util.json.JsonBuilder;
import org.elasticsearch.util.settings.Settings;

import java.io.IOException;

import static org.elasticsearch.client.Requests.*;
import static org.elasticsearch.rest.RestRequest.Method.*;
import static org.elasticsearch.rest.RestResponse.Status.*;
import static org.elasticsearch.rest.action.support.RestActions.*;
import static org.elasticsearch.rest.action.support.RestJsonBuilder.*;
import static org.elasticsearch.util.TimeValue.*;

/**
* @author kimchy (shay.banon)
*/
public class RestMoreLikeThisAction extends BaseRestHandler {

@Inject public RestMoreLikeThisAction(Settings settings, Client client, RestController controller) {
super(settings, client);
controller.registerHandler(GET, "/{index}/{type}/{id}/_moreLikeThis", this);
controller.registerHandler(POST, "/{index}/{type}/{id}/_moreLikeThis", this);
}

@Override public void handleRequest(final RestRequest request, final RestChannel channel) {
MoreLikeThisRequest mltRequest = moreLikeThisRequest(request.param("index")).type(request.param("type")).id(request.param("id"));
try {
mltRequest.fields(request.paramAsStringArray("fields", null));
mltRequest.percentTermsToMatch(request.paramAsFloat("percentTermsToMatch", -1));
mltRequest.minTermFrequency(request.paramAsInt("minTermFrequency", -1));
mltRequest.maxQueryTerms(request.paramAsInt("maxQueryTerms", -1));
mltRequest.stopWords(request.paramAsStringArray("stopWords", null));
mltRequest.minDocFreq(request.paramAsInt("minDocFreq", -1));
mltRequest.maxDocFreq(request.paramAsInt("maxDocFreq", -1));
mltRequest.minWordLen(request.paramAsInt("minWordLen", -1));
mltRequest.maxWordLen(request.paramAsInt("maxWordLen", -1));
mltRequest.boostTerms(request.paramAsBoolean("boostTerms", null));
mltRequest.boostTermsFactor(request.paramAsFloat("boostTermsFactor", -1));

mltRequest.searchType(parseSearchType(request.param("searchType")));
mltRequest.searchIndices(request.paramAsStringArray("searchIndices", null));
mltRequest.searchTypes(request.paramAsStringArray("searchTypes", null));
mltRequest.searchQueryHint(request.param("searchQueryHint"));
String searchScroll = request.param("searchScroll");
if (searchScroll != null) {
mltRequest.searchScroll(new Scroll(parseTimeValue(searchScroll, null)));
}
if (request.hasContent()) {
mltRequest.searchSource(request.contentAsBytes());
}
} catch (Exception e) {
try {
JsonBuilder builder = restJsonBuilder(request);
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject()));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);
}
return;
}

client.moreLikeThis(mltRequest, new ActionListener<SearchResponse>() {
@Override public void onResponse(SearchResponse response) {
try {
JsonBuilder builder = restJsonBuilder(request);
builder.startObject();
response.toJson(builder, request);
builder.endObject();
channel.sendResponse(new JsonRestResponse(request, OK, builder));
} catch (Exception e) {
onFailure(e);
}
}

@Override public void onFailure(Throwable e) {
try {
channel.sendResponse(new JsonThrowableRestResponse(request, e));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);
}
}
});
}
}
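
The handler above answers GET and POST on /{index}/{type}/{id}/_moreLikeThis, reads all more-like-this and search options from query parameters, and uses an optional request body as the search source. An illustrative request, with all names and values chosen as examples only:

    GET /twitter/tweet/1/_moreLikeThis?fields=message&minTermFrequency=1&searchIndices=twitter&searchType=query_then_fetch
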
@@ -25,13 +25,11 @@ import org.elasticsearch.action.ActionListener;

import org.elasticsearch.action.search.SearchOperationThreading;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.client.Client;
import org.elasticsearch.index.query.json.JsonQueryBuilders;
import org.elasticsearch.index.query.json.QueryStringJsonQueryBuilder;
import org.elasticsearch.rest.*;
import org.elasticsearch.rest.action.support.RestActions;
import org.elasticsearch.rest.action.support.RestJsonBuilder;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.util.json.JsonBuilder;

@@ -43,6 +41,8 @@ import java.util.regex.Pattern;

import static org.elasticsearch.rest.RestRequest.Method.*;
import static org.elasticsearch.rest.RestResponse.Status.*;
import static org.elasticsearch.rest.action.support.RestActions.*;
import static org.elasticsearch.rest.action.support.RestJsonBuilder.*;
import static org.elasticsearch.util.TimeValue.*;

/**

@@ -82,7 +82,7 @@ public class RestSearchAction extends BaseRestHandler {

searchRequest.operationThreading(operationThreading);
} catch (Exception e) {
try {
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
JsonBuilder builder = restJsonBuilder(request);
channel.sendResponse(new JsonRestResponse(request, BAD_REQUEST, builder.startObject().field("error", e.getMessage()).endObject()));
} catch (IOException e1) {
logger.error("Failed to send failure response", e1);

@@ -90,11 +90,11 @@ public class RestSearchAction extends BaseRestHandler {

return;
}
client.execSearch(searchRequest, new ActionListener<SearchResponse>() {
@Override public void onResponse(SearchResponse result) {
@Override public void onResponse(SearchResponse response) {
try {
JsonBuilder builder = RestJsonBuilder.restJsonBuilder(request);
JsonBuilder builder = restJsonBuilder(request);
builder.startObject();
result.toJson(builder, request);
response.toJson(builder, request);
builder.endObject();
channel.sendResponse(new JsonRestResponse(request, OK, builder));
} catch (Exception e) {

@@ -116,42 +116,16 @@ public class RestSearchAction extends BaseRestHandler {

String[] indices = RestActions.splitIndices(request.param("index"));
SearchRequest searchRequest = new SearchRequest(indices, parseSearchSource(request));

String searchType = request.param("searchType");
if (searchType != null) {
if ("dfs_query_then_fetch".equals(searchType)) {
searchRequest.searchType(SearchType.DFS_QUERY_THEN_FETCH);
} else if ("dfs_query_and_fetch".equals(searchType)) {
searchRequest.searchType(SearchType.DFS_QUERY_AND_FETCH);
} else if ("query_then_fetch".equals(searchType)) {
searchRequest.searchType(SearchType.QUERY_THEN_FETCH);
} else if ("query_and_fetch".equals(searchType)) {
searchRequest.searchType(SearchType.QUERY_AND_FETCH);
} else {
throw new ElasticSearchIllegalArgumentException("No search type for [" + searchType + "]");
}
} else {
searchRequest.searchType(SearchType.QUERY_THEN_FETCH);
}

String from = request.param("from");
if (from != null) {
searchRequest.from(Integer.parseInt(from));
}

String size = request.param("size");
if (size != null) {
searchRequest.size(Integer.parseInt(size));
}
searchRequest.searchType(parseSearchType(request.param("searchType")));
searchRequest.from(request.paramAsInt("from", -1));
searchRequest.size(request.paramAsInt("size", -1));

String scroll = request.param("scroll");
if (scroll != null) {
searchRequest.scroll(new Scroll(parseTimeValue(scroll, null)));
}

String timeout = request.param("timeout");
if (timeout != null) {
searchRequest.timeout(parseTimeValue(timeout, null));
}
searchRequest.timeout(request.paramAsTime("timeout", null));

String typesParam = request.param("type");
if (typesParam != null) {
@@ -21,6 +21,7 @@ package org.elasticsearch.rest.action.support;

import org.elasticsearch.ElasticSearchIllegalArgumentException;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse;
import org.elasticsearch.index.query.json.JsonQueryBuilders;
import org.elasticsearch.index.query.json.QueryStringJsonQueryBuilder;

@@ -36,16 +37,9 @@ import java.util.regex.Pattern;

*/
public class RestActions {

public final static Pattern indicesPattern;
public final static Pattern typesPattern;
private final static Pattern nodesIdsPattern;

static {
indicesPattern = Pattern.compile(",");
typesPattern = Pattern.compile(",");
nodesIdsPattern = Pattern.compile(",");
}
public final static Pattern indicesPattern = Pattern.compile(",");
public final static Pattern typesPattern = Pattern.compile(",");
public final static Pattern nodesIdsPattern = Pattern.compile(",");

public static void buildBroadcastShardsHeader(JsonBuilder builder, BroadcastOperationResponse response) throws IOException {
builder.startObject("_shards");

@@ -70,6 +64,23 @@ public class RestActions {

builder.endObject();
}

public static SearchType parseSearchType(String searchType) {
if (searchType == null) {
return SearchType.DEFAULT;
}
if ("dfs_query_then_fetch".equals(searchType)) {
return SearchType.DFS_QUERY_THEN_FETCH;
} else if ("dfs_query_and_fetch".equals(searchType)) {
return SearchType.DFS_QUERY_AND_FETCH;
} else if ("query_then_fetch".equals(searchType)) {
return SearchType.QUERY_THEN_FETCH;
} else if ("query_and_fetch".equals(searchType)) {
return SearchType.QUERY_AND_FETCH;
} else {
throw new ElasticSearchIllegalArgumentException("No search type for [" + searchType + "]");
}
}

public static byte[] parseQuerySource(RestRequest request) {
if (request.hasContent()) {
return request.contentAsBytes();
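
parseSearchType centralizes the string-to-enum mapping that RestSearchAction previously inlined: null maps to SearchType.DEFAULT and unknown values raise ElasticSearchIllegalArgumentException. A quick illustrative use:

    SearchType explicit = RestActions.parseSearchType("dfs_query_then_fetch"); // -> DFS_QUERY_THEN_FETCH
    SearchType fallback = RestActions.parseSearchType(null);                   // -> SearchType.DEFAULT (QUERY_THEN_FETCH)
    // RestActions.parseSearchType("unknown") throws ElasticSearchIllegalArgumentException
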
@@ -245,8 +245,7 @@ public class SearchService extends AbstractComponent implements LifecycleCompone

SearchShardTarget shardTarget = new SearchShardTarget(clusterService.state().nodes().localNodeId(), request.index(), request.shardId());

SearchContext context = new SearchContext(idGenerator.incrementAndGet(), shardTarget, request.timeout(),
request.source(), request.types(), engineSearcher, indexService);
SearchContext context = new SearchContext(idGenerator.incrementAndGet(), shardTarget, request.timeout(), request.types(), engineSearcher, indexService);

// init the from and size
context.from(request.from());

@@ -254,7 +253,8 @@ public class SearchService extends AbstractComponent implements LifecycleCompone

context.scroll(request.scroll());

parseSource(context);
parseSource(context, request.source());
parseSource(context, request.extraSource());

// if the from and size are still not set, default them
if (context.from() == -1) {

@@ -293,9 +293,13 @@ public class SearchService extends AbstractComponent implements LifecycleCompone

context.release();
}

private void parseSource(SearchContext context) throws SearchParseException {
private void parseSource(SearchContext context, byte[] source) throws SearchParseException {
// nothing to parse...
if (source == null || source.length == 0) {
return;
}
try {
JsonParser jp = jsonFactory.createJsonParser(context.source());
JsonParser jp = jsonFactory.createJsonParser(source);
JsonToken token;
while ((token = jp.nextToken()) != JsonToken.END_OBJECT) {
if (token == JsonToken.FIELD_NAME) {

@@ -311,7 +315,7 @@ public class SearchService extends AbstractComponent implements LifecycleCompone

}
}
} catch (Exception e) {
throw new SearchParseException(context, "Failed to parse [" + Unicode.fromBytes(context.source()) + "]", e);
throw new SearchParseException(context, "Failed to parse [" + Unicode.fromBytes(source) + "]", e);
}
}
@@ -154,8 +154,10 @@ public class SearchSourceBuilder {

builder.field("queryParserName", queryParserName);
}

builder.field("query");
queryBuilder.toJson(builder, ToJson.EMPTY_PARAMS);
if (queryBuilder != null) {
builder.field("query");
queryBuilder.toJson(builder, ToJson.EMPTY_PARAMS);
}

if (explain != null) {
builder.field("explain", explain);
@@ -21,6 +21,7 @@ package org.elasticsearch.search.internal;

import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.search.Scroll;
import org.elasticsearch.util.Bytes;
import org.elasticsearch.util.Strings;
import org.elasticsearch.util.TimeValue;
import org.elasticsearch.util.io.Streamable;

@@ -70,6 +71,8 @@ public class InternalSearchRequest implements Streamable {

private byte[] source;

private byte[] extraSource;

public InternalSearchRequest() {
}

@@ -95,6 +98,15 @@ public class InternalSearchRequest implements Streamable {

return this.source;
}

public byte[] extraSource() {
return this.extraSource;
}

public InternalSearchRequest extraSource(byte[] extraSource) {
this.extraSource = extraSource;
return this;
}

public Scroll scroll() {
return scroll;
}

@@ -150,8 +162,20 @@ public class InternalSearchRequest implements Streamable {

if (in.readBoolean()) {
timeout = readTimeValue(in);
}
source = new byte[in.readInt()];
in.readFully(source);
int size = in.readInt();
if (size == 0) {
source = Bytes.EMPTY_ARRAY;
} else {
source = new byte[size];
in.readFully(source);
}
size = in.readInt();
if (size == 0) {
extraSource = Bytes.EMPTY_ARRAY;
} else {
extraSource = new byte[size];
in.readFully(extraSource);
}
int typesSize = in.readInt();
if (typesSize > 0) {
types = new String[typesSize];

@@ -178,8 +202,18 @@ public class InternalSearchRequest implements Streamable {

out.writeBoolean(true);
timeout.writeTo(out);
}
out.writeInt(source.length);
out.write(source);
if (source == null) {
out.writeInt(0);
} else {
out.writeInt(source.length);
out.write(source);
}
if (extraSource == null) {
out.writeInt(0);
} else {
out.writeInt(extraSource.length);
out.write(extraSource);
}
out.writeInt(types.length);
for (String type : types) {
out.writeUTF(type);
@@ -50,8 +50,6 @@ public class SearchContext implements Releasable {

private final SearchShardTarget shardTarget;

private final byte[] source;

private final Engine.Searcher engineSearcher;

private final IndexService indexService;

@@ -100,12 +98,11 @@ public class SearchContext implements Releasable {

private volatile Timeout keepAliveTimeout;

public SearchContext(long id, SearchShardTarget shardTarget, TimeValue timeout, byte[] source,
public SearchContext(long id, SearchShardTarget shardTarget, TimeValue timeout,
String[] types, Engine.Searcher engineSearcher, IndexService indexService) {
this.id = id;
this.shardTarget = shardTarget;
this.timeout = timeout;
this.source = source;
this.types = types;
this.engineSearcher = engineSearcher;
this.dfsResult = new DfsSearchResult(id, shardTarget);

@@ -137,10 +134,6 @@ public class SearchContext implements Releasable {

return this.shardTarget;
}

public byte[] source() {
return source;
}

public String[] types() {
return types;
}
@@ -0,0 +1,28 @@

/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.util;

/**
* @author kimchy (shay.banon)
*/
public class Bytes {

public static final byte[] EMPTY_ARRAY = new byte[0];
}
@@ -0,0 +1,49 @@

/*
* Licensed to Elastic Search and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Elastic Search licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.test.integration.client.transport;

import org.elasticsearch.client.Client;
import org.elasticsearch.client.transport.TransportClient;
import org.elasticsearch.server.internal.InternalServer;
import org.elasticsearch.test.integration.document.MoreLikeThisActionTests;
import org.elasticsearch.transport.TransportService;
import org.elasticsearch.util.transport.TransportAddress;

import static org.elasticsearch.util.settings.ImmutableSettings.*;

/**
* @author kimchy (shay.banon)
*/
public class TransportClientMoreLikeThisActionTests extends MoreLikeThisActionTests {

@Override protected Client getClient1() {
TransportAddress server1Address = ((InternalServer) server("server1")).injector().getInstance(TransportService.class).boundAddress().publishAddress();
TransportClient client = new TransportClient(settingsBuilder().putBoolean("discovery.enabled", false).build());
client.addTransportAddress(server1Address);
return client;
}

@Override protected Client getClient2() {
TransportAddress server1Address = ((InternalServer) server("server2")).injector().getInstance(TransportService.class).boundAddress().publishAddress();
TransportClient client = new TransportClient(settingsBuilder().putBoolean("discovery.enabled", false).build());
client.addTransportAddress(server1Address);
return client;
}
}
@@ -0,0 +1,6 @@

cluster:
    routing:
        schedule: 100ms
index:
    numberOfShards: 5
    numberOfReplicas: 1