Term Vectors: More consistent naming for term vector[s]

We speak of the term vectors of a document, where each field has an associated
stored term vector. Since by default all of a document's term vectors are
requested, the HTTP endpoint should be called `_termvectors` rather than
`_termvector`. Usage of `_termvector` is now deprecated, as are the transport
client calls `termVector` and `prepareTermVector`.

Closes #8484
Alex Ksikes 2014-11-13 15:05:09 +01:00
parent a6e6c4efc4
commit 1959275622
57 changed files with 484 additions and 420 deletions
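A minimal usage sketch of the renamed Java client API, assuming a connected `Client` (for example a transport client); the wrapper class, the method name, and the `twitter`/`tweet`/`1` index, type, and id values are illustrative placeholders, while the builder calls are the ones introduced by this rename:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.client.Client;

public class TermVectorsExample {

    // Fetches term vectors for one document using the renamed entry point.
    public static TermVectorsResponse fetchTermVectors(Client client) {
        return client.prepareTermVectors("twitter", "tweet", "1")
                .setSelectedFields("text")
                .setOffsets(true)
                .setPositions(true)
                .setTermStatistics(true)
                .execute()
                .actionGet();
        // client.prepareTermVector(...) and client.termVector(...) still exist,
        // but are deprecated in favor of prepareTermVectors(...) and termVectors(...).
    }
}
--------------------------------------------------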

View File

@ -8,7 +8,7 @@ realtime. This can be changed by setting `realtime` parameter to `false`.
[source,js]
--------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvector?pretty=true'
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvectors?pretty=true'
--------------------------------------------------
Optionally, you can specify the fields for which the information is
@ -16,13 +16,16 @@ retrieved either with a parameter in the url
[source,js]
--------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvector?fields=text,...'
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvectors?fields=text,...'
--------------------------------------------------
or by adding the requested fields in the request body (see
example below). Fields can also be specified with wildcards
in similar way to the <<query-dsl-multi-match-query,multi match query>>
[WARNING]
Note that the usage of `/_termvector` is deprecated in 2.0, and replaced by `/_termvectors`.
[float]
=== Return values
@ -160,7 +163,7 @@ The following request returns all information and statistics for field
[source,js]
--------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvector?pretty=true' -d '{
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvectors?pretty=true' -d '{
"fields" : ["text"],
"offsets" : true,
"payloads" : true,
@ -243,7 +246,7 @@ Note that for the field `text`, the terms are not re-generated.
[source,js]
--------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvector?pretty=true' -d '{
curl -XGET 'http://localhost:9200/twitter/tweet/1/_termvectors?pretty=true' -d '{
"fields" : ["text", "some_field_without_term_vectors"],
"offsets" : true,
"positions" : true,
@ -270,7 +273,7 @@ mapping will be dynamically created.
[source,js]
--------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/_termvector' -d '{
curl -XGET 'http://localhost:9200/twitter/tweet/_termvectors' -d '{
"doc" : {
"fullname" : "John Doe",
"text" : "twitter test test test"
@ -290,7 +293,7 @@ vectors, the term vectors will be re-generated.
[source,js]
--------------------------------------------------
curl -XGET 'http://localhost:9200/twitter/tweet/_termvector' -d '{
curl -XGET 'http://localhost:9200/twitter/tweet/_termvectors' -d '{
"doc" : {
"fullname" : "John Doe",
"text" : "twitter test test test"

View File

@ -52,3 +52,7 @@ In addition, the following node settings related to routing have been deprecated
=== Store
The `memory` / `ram` store (`index.store.type`) option was removed in Elasticsearch 2.0.
=== Term Vectors API
Usage of `/_termvector` is deprecated in favor of `/_termvectors`.

View File

@ -1,10 +1,10 @@
{
"termvector" : {
"termvectors" : {
"documentation" : "http://www.elasticsearch.org/guide/en/elasticsearch/reference/master/docs-termvectors.html",
"methods" : ["GET", "POST"],
"url" : {
"path" : "/{index}/{type}/{id}/_termvector",
"paths" : ["/{index}/{type}/{id}/_termvector"],
"path" : "/{index}/{type}/{id}/_termvectors",
"paths" : ["/{index}/{type}/{id}/_termvectors"],
"parts" : {
"index" : {
"type" : "string",

View File

@ -23,7 +23,7 @@ setup:
"Basic tests for termvector get":
- do:
termvector:
termvectors:
index: testidx
type: testtype
id: testing_document

View File

@ -29,7 +29,7 @@
text : "foo bar"
- do:
termvector:
termvectors:
index: testidx
type: doc
id: 1

View File

@ -22,7 +22,7 @@
body: { foo: bar }
- do:
termvector:
termvectors:
index: test_1
type: test
id: 1
@ -31,7 +31,7 @@
- is_false: found
- do:
termvector:
termvectors:
index: test_1
type: test
id: 1

View File

@ -159,8 +159,8 @@ import org.elasticsearch.action.suggest.TransportSuggestAction;
import org.elasticsearch.action.support.ActionFilter;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.TransportAction;
import org.elasticsearch.action.termvector.*;
import org.elasticsearch.action.termvector.dfs.TransportDfsOnlyAction;
import org.elasticsearch.action.termvectors.*;
import org.elasticsearch.action.termvectors.dfs.TransportDfsOnlyAction;
import org.elasticsearch.action.update.TransportUpdateAction;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.common.inject.AbstractModule;
@ -281,10 +281,10 @@ public class ActionModule extends AbstractModule {
registerAction(IndexAction.INSTANCE, TransportIndexAction.class);
registerAction(GetAction.INSTANCE, TransportGetAction.class);
registerAction(TermVectorAction.INSTANCE, TransportSingleShardTermVectorAction.class,
registerAction(TermVectorsAction.INSTANCE, TransportTermVectorsAction.class,
TransportDfsOnlyAction.class);
registerAction(MultiTermVectorsAction.INSTANCE, TransportMultiTermVectorsAction.class,
TransportSingleShardMultiTermsVectorAction.class);
TransportShardMultiTermsVectorAction.class);
registerAction(DeleteAction.INSTANCE, TransportDeleteAction.class,
TransportIndexDeleteAction.class, TransportShardDeleteAction.class);
registerAction(CountAction.INSTANCE, TransportCountAction.class);

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.action.ClientAction;
import org.elasticsearch.client.Client;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
@ -30,14 +30,14 @@ import java.io.IOException;
*/
public class MultiTermVectorsItemResponse implements Streamable {
private TermVectorResponse response;
private TermVectorsResponse response;
private MultiTermVectorsResponse.Failure failure;
MultiTermVectorsItemResponse() {
}
public MultiTermVectorsItemResponse(TermVectorResponse response, MultiTermVectorsResponse.Failure failure) {
public MultiTermVectorsItemResponse(TermVectorsResponse response, MultiTermVectorsResponse.Failure failure) {
assert (((response == null) && (failure != null)) || ((response != null) && (failure == null)));
this.response = response;
this.failure = failure;
@ -83,7 +83,7 @@ public class MultiTermVectorsItemResponse implements Streamable {
/**
* The actual get response, <tt>null</tt> if its a failure.
*/
public TermVectorResponse getResponse() {
public TermVectorsResponse getResponse() {
return this.response;
}
@ -105,7 +105,7 @@ public class MultiTermVectorsItemResponse implements Streamable {
if (in.readBoolean()) {
failure = MultiTermVectorsResponse.Failure.readFailure(in);
} else {
response = new TermVectorResponse();
response = new TermVectorsResponse();
response.readFrom(in);
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.google.common.collect.Iterators;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
@ -33,20 +33,20 @@ import org.elasticsearch.common.xcontent.XContentParser;
import java.io.IOException;
import java.util.*;
public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsRequest> implements Iterable<TermVectorRequest>, CompositeIndicesRequest {
public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsRequest> implements Iterable<TermVectorsRequest>, CompositeIndicesRequest {
String preference;
List<TermVectorRequest> requests = new ArrayList<>();
List<TermVectorsRequest> requests = new ArrayList<>();
final Set<String> ids = new HashSet<>();
public MultiTermVectorsRequest add(TermVectorRequest termVectorRequest) {
requests.add(termVectorRequest);
public MultiTermVectorsRequest add(TermVectorsRequest termVectorsRequest) {
requests.add(termVectorsRequest);
return this;
}
public MultiTermVectorsRequest add(String index, @Nullable String type, String id) {
requests.add(new TermVectorRequest(index, type, id));
requests.add(new TermVectorsRequest(index, type, id));
return this;
}
@ -57,8 +57,8 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
validationException = ValidateActions.addValidationError("multi term vectors: no documents requested", validationException);
} else {
for (int i = 0; i < requests.size(); i++) {
TermVectorRequest termVectorRequest = requests.get(i);
ActionRequestValidationException validationExceptionForDoc = termVectorRequest.validate();
TermVectorsRequest termVectorsRequest = requests.get(i);
ActionRequestValidationException validationExceptionForDoc = termVectorsRequest.validate();
if (validationExceptionForDoc != null) {
validationException = ValidateActions.addValidationError("at multi term vectors for doc " + i,
validationExceptionForDoc);
@ -74,7 +74,7 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
}
@Override
public Iterator<TermVectorRequest> iterator() {
public Iterator<TermVectorsRequest> iterator() {
return Iterators.unmodifiableIterator(requests.iterator());
}
@ -82,11 +82,11 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
return requests.isEmpty() && ids.isEmpty();
}
public List<TermVectorRequest> getRequests() {
public List<TermVectorsRequest> getRequests() {
return requests;
}
public void add(TermVectorRequest template, BytesReference data) throws Exception {
public void add(TermVectorsRequest template, BytesReference data) throws Exception {
XContentParser.Token token;
String currentFieldName = null;
if (data.length() > 0) {
@ -100,9 +100,9 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
if (token != XContentParser.Token.START_OBJECT) {
throw new ElasticsearchIllegalArgumentException("docs array element should include an object");
}
TermVectorRequest termVectorRequest = new TermVectorRequest(template);
TermVectorRequest.parseRequest(termVectorRequest, parser);
add(termVectorRequest);
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(template);
TermVectorsRequest.parseRequest(termVectorsRequest, parser);
add(termVectorsRequest);
}
} else if ("ids".equals(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@ -117,7 +117,7 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
}
} else if (token == XContentParser.Token.START_OBJECT && currentFieldName != null) {
if ("parameters".equals(currentFieldName)) {
TermVectorRequest.parseRequest(template, parser);
TermVectorsRequest.parseRequest(template, parser);
} else {
throw new ElasticsearchParseException(
"No parameter named " + currentFieldName + "and type OBJECT");
@ -129,7 +129,7 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
}
}
for (String id : ids) {
TermVectorRequest curRequest = new TermVectorRequest(template);
TermVectorsRequest curRequest = new TermVectorsRequest(template);
curRequest.id(id);
requests.add(curRequest);
}
@ -142,7 +142,7 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
int size = in.readVInt();
requests = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
requests.add(TermVectorRequest.readTermVectorRequest(in));
requests.add(TermVectorsRequest.readTermVectorsRequest(in));
}
}
@ -151,8 +151,8 @@ public class MultiTermVectorsRequest extends ActionRequest<MultiTermVectorsReque
super.writeTo(out);
out.writeOptionalString(preference);
out.writeVInt(requests.size());
for (TermVectorRequest termVectorRequest : requests) {
termVectorRequest.writeTo(out);
for (TermVectorsRequest termVectorsRequest : requests) {
termVectorsRequest.writeTo(out);
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
@ -43,8 +43,8 @@ public class MultiTermVectorsRequestBuilder extends ActionRequestBuilder<MultiTe
return this;
}
public MultiTermVectorsRequestBuilder add(TermVectorRequest termVectorRequest) {
request.add(termVectorRequest);
public MultiTermVectorsRequestBuilder add(TermVectorsRequest termVectorsRequest) {
request.add(termVectorsRequest);
return this;
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.google.common.collect.Iterators;
import org.elasticsearch.action.ActionResponse;
@ -135,7 +135,7 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
builder.field(Fields.ERROR, failure.getMessage());
builder.endObject();
} else {
TermVectorResponse getResponse = response.getResponse();
TermVectorsResponse getResponse = response.getResponse();
builder.startObject();
getResponse.toXContent(builder, params);
builder.endObject();

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.carrotsearch.hppc.IntArrayList;
import org.elasticsearch.action.support.single.shard.SingleShardOperationRequest;
@ -34,7 +34,7 @@ public class MultiTermVectorsShardRequest extends SingleShardOperationRequest<Mu
private String preference;
IntArrayList locations;
List<TermVectorRequest> requests;
List<TermVectorsRequest> requests;
MultiTermVectorsShardRequest() {
@ -66,7 +66,7 @@ public class MultiTermVectorsShardRequest extends SingleShardOperationRequest<Mu
}
public void add(int location, TermVectorRequest request) {
public void add(int location, TermVectorsRequest request) {
this.locations.add(location);
this.requests.add(request);
}
@ -88,7 +88,7 @@ public class MultiTermVectorsShardRequest extends SingleShardOperationRequest<Mu
requests = new ArrayList<>(size);
for (int i = 0; i < size; i++) {
locations.add(in.readVInt());
requests.add(TermVectorRequest.readTermVectorRequest(in));
requests.add(TermVectorsRequest.readTermVectorsRequest(in));
}
preference = in.readOptionalString();

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.carrotsearch.hppc.IntArrayList;
import org.elasticsearch.action.ActionResponse;
@ -31,7 +31,7 @@ import java.util.List;
public class MultiTermVectorsShardResponse extends ActionResponse {
IntArrayList locations;
List<TermVectorResponse> responses;
List<TermVectorsResponse> responses;
List<MultiTermVectorsResponse.Failure> failures;
MultiTermVectorsShardResponse() {
@ -40,7 +40,7 @@ public class MultiTermVectorsShardResponse extends ActionResponse {
failures = new ArrayList<>();
}
public void add(int location, TermVectorResponse response) {
public void add(int location, TermVectorsResponse response) {
locations.add(location);
responses.add(response);
failures.add(null);
@ -62,7 +62,7 @@ public class MultiTermVectorsShardResponse extends ActionResponse {
for (int i = 0; i < size; i++) {
locations.add(in.readVInt());
if (in.readBoolean()) {
TermVectorResponse response = new TermVectorResponse();
TermVectorsResponse response = new TermVectorsResponse();
response.readFrom(in);
responses.add(response);
} else {

View File

@ -17,29 +17,29 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.action.ClientAction;
import org.elasticsearch.client.Client;
/**
*/
public class TermVectorAction extends ClientAction<TermVectorRequest, TermVectorResponse, TermVectorRequestBuilder> {
public class TermVectorsAction extends ClientAction<TermVectorsRequest, TermVectorsResponse, TermVectorsRequestBuilder> {
public static final TermVectorAction INSTANCE = new TermVectorAction();
public static final TermVectorsAction INSTANCE = new TermVectorsAction();
public static final String NAME = "indices:data/read/tv";
private TermVectorAction() {
private TermVectorsAction() {
super(NAME);
}
@Override
public TermVectorResponse newResponse() {
return new TermVectorResponse();
public TermVectorsResponse newResponse() {
return new TermVectorsResponse();
}
@Override
public TermVectorRequestBuilder newRequestBuilder(Client client) {
return new TermVectorRequestBuilder(client);
public TermVectorsRequestBuilder newRequestBuilder(Client client) {
return new TermVectorsRequestBuilder(client);
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.carrotsearch.hppc.ObjectLongOpenHashMap;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;
@ -28,17 +28,16 @@ import org.elasticsearch.common.io.stream.BytesStreamInput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Comparator;
import java.util.Iterator;
import static org.apache.lucene.util.ArrayUtil.grow;
/**
* This class represents the result of a {@link TermVectorRequest}. It works
* This class represents the result of a {@link TermVectorsRequest}. It works
* exactly like the {@link Fields} class except for one thing: It can return
* offsets and payloads even if positions are not present. You must call
* nextPosition() anyway to move the counter although this method only returns
* <tt>-1,</tt>, if no positions were returned by the {@link TermVectorRequest}.
* <tt>-1,</tt>, if no positions were returned by the {@link TermVectorsRequest}.
* <p/>
* The data is stored in two byte arrays ({@code headerRef} and
* {@code termVectors}, both {@link ByteRef}) that have the following format:
@ -108,7 +107,7 @@ import static org.apache.lucene.util.ArrayUtil.grow;
* </ul> </ul>
*/
public final class TermVectorFields extends Fields {
public final class TermVectorsFields extends Fields {
private final ObjectLongOpenHashMap<String> fieldMap;
private final BytesReference termVectors;
@ -120,7 +119,7 @@ public final class TermVectorFields extends Fields {
* header information as {@link BytesRef}.
* @param termVectors Stores the actual term vectors as a {@link BytesRef}.
*/
public TermVectorFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
public TermVectorsFields(BytesReference headerRef, BytesReference termVectors) throws IOException {
BytesStreamInput header = new BytesStreamInput(headerRef);
fieldMap = new ObjectLongOpenHashMap<>();

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
@ -48,7 +48,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
* Note, the {@link #index()}, {@link #type(String)} and {@link #id(String)} are
* required.
*/
public class TermVectorRequest extends SingleShardOperationRequest<TermVectorRequest> implements DocumentRequest<TermVectorRequest> {
public class TermVectorsRequest extends SingleShardOperationRequest<TermVectorsRequest> implements DocumentRequest<TermVectorsRequest> {
private String type;
@ -72,7 +72,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
private EnumSet<Flag> flagsEnum = EnumSet.of(Flag.Positions, Flag.Offsets, Flag.Payloads,
Flag.FieldStatistics);
public TermVectorRequest() {
public TermVectorsRequest() {
}
/**
@ -80,7 +80,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
* from the provided index. Use {@link #type(String)} and
* {@link #id(String)} to specify the document to load.
*/
public TermVectorRequest(String index, String type, String id) {
public TermVectorsRequest(String index, String type, String id) {
super(index);
this.id = id;
this.type = type;
@ -91,7 +91,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
* from the provided index. Use {@link #type(String)} and
* {@link #id(String)} to specify the document to load.
*/
public TermVectorRequest(TermVectorRequest other) {
public TermVectorsRequest(TermVectorsRequest other) {
super(other.index());
this.id = other.id();
this.type = other.type();
@ -104,7 +104,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
this.realtime = other.realtime();
}
public TermVectorRequest(MultiGetRequest.Item item) {
public TermVectorsRequest(MultiGetRequest.Item item) {
super(item.index());
this.id = item.id();
this.type = item.type();
@ -119,7 +119,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Sets the type of document to get the term vector for.
*/
public TermVectorRequest type(String type) {
public TermVectorsRequest type(String type) {
this.type = type;
return this;
}
@ -141,7 +141,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Sets the id of document the term vector is requested for.
*/
public TermVectorRequest id(String id) {
public TermVectorsRequest id(String id) {
this.id = id;
return this;
}
@ -156,14 +156,14 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Sets an artificial document from which term vectors are requested for.
*/
public TermVectorRequest doc(XContentBuilder documentBuilder) {
public TermVectorsRequest doc(XContentBuilder documentBuilder) {
return this.doc(documentBuilder.bytes(), true);
}
/**
* Sets an artificial document from which term vectors are requested for.
*/
public TermVectorRequest doc(BytesReference doc, boolean generateRandomId) {
public TermVectorsRequest doc(BytesReference doc, boolean generateRandomId) {
// assign a random id to this artificial document, for routing
if (generateRandomId) {
this.id(String.valueOf(randomInt.getAndAdd(1)));
@ -179,7 +179,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
return routing;
}
public TermVectorRequest routing(String routing) {
public TermVectorsRequest routing(String routing) {
this.routing = routing;
return this;
}
@ -188,7 +188,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
* Sets the parent id of this document. Will simply set the routing to this
* value, as it is only used for routing with delete requests.
*/
public TermVectorRequest parent(String parent) {
public TermVectorsRequest parent(String parent) {
if (routing == null) {
routing = parent;
}
@ -206,7 +206,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
* which guarantees that the same order will be used across different
* requests.
*/
public TermVectorRequest preference(String preference) {
public TermVectorsRequest preference(String preference) {
this.preference = preference;
return this;
}
@ -215,7 +215,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
* Return the start and stop offsets for each term if they were stored or
* skip offsets.
*/
public TermVectorRequest offsets(boolean offsets) {
public TermVectorsRequest offsets(boolean offsets) {
setFlag(Flag.Offsets, offsets);
return this;
}
@ -231,7 +231,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Return the positions for each term if stored or skip.
*/
public TermVectorRequest positions(boolean positions) {
public TermVectorsRequest positions(boolean positions) {
setFlag(Flag.Positions, positions);
return this;
}
@ -255,7 +255,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Return the payloads for each term or skip.
*/
public TermVectorRequest payloads(boolean payloads) {
public TermVectorsRequest payloads(boolean payloads) {
setFlag(Flag.Payloads, payloads);
return this;
}
@ -271,7 +271,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Return the term statistics for each term in the shard or skip.
*/
public TermVectorRequest termStatistics(boolean termStatistics) {
public TermVectorsRequest termStatistics(boolean termStatistics) {
setFlag(Flag.TermStatistics, termStatistics);
return this;
}
@ -287,7 +287,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Return the field statistics for each term in the shard or skip.
*/
public TermVectorRequest fieldStatistics(boolean fieldStatistics) {
public TermVectorsRequest fieldStatistics(boolean fieldStatistics) {
setFlag(Flag.FieldStatistics, fieldStatistics);
return this;
}
@ -303,7 +303,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Use distributed frequencies instead of shard statistics.
*/
public TermVectorRequest dfs(boolean dfs) {
public TermVectorsRequest dfs(boolean dfs) {
setFlag(Flag.Dfs, dfs);
return this;
}
@ -320,7 +320,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
* Return only term vectors for special selected fields. Returns the term
* vectors for all fields if selectedFields == null
*/
public TermVectorRequest selectedFields(String... fields) {
public TermVectorsRequest selectedFields(String... fields) {
selectedFields = fields != null && fields.length != 0 ? Sets.newHashSet(fields) : null;
return this;
}
@ -335,7 +335,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Choose whether term vectors be generated real-time.
*/
public TermVectorRequest realtime(Boolean realtime) {
public TermVectorsRequest realtime(Boolean realtime) {
this.realtime = realtime;
return this;
}
@ -350,7 +350,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* Override the analyzer used at each field when generating term vectors.
*/
public TermVectorRequest perFieldAnalyzer(Map<String, String> perFieldAnalyzer) {
public TermVectorsRequest perFieldAnalyzer(Map<String, String> perFieldAnalyzer) {
this.perFieldAnalyzer = perFieldAnalyzer != null && perFieldAnalyzer.size() != 0 ? Maps.newHashMap(perFieldAnalyzer) : null;
return this;
}
@ -376,10 +376,10 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
return validationException;
}
public static TermVectorRequest readTermVectorRequest(StreamInput in) throws IOException {
TermVectorRequest termVectorRequest = new TermVectorRequest();
termVectorRequest.readFrom(in);
return termVectorRequest;
public static TermVectorsRequest readTermVectorsRequest(StreamInput in) throws IOException {
TermVectorsRequest termVectorsRequest = new TermVectorsRequest();
termVectorsRequest.readFrom(in);
return termVectorsRequest;
}
@ -472,7 +472,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
/**
* populates a request object (pre-populated with defaults) based on a parser.
*/
public static void parseRequest(TermVectorRequest termVectorRequest, XContentParser parser) throws IOException {
public static void parseRequest(TermVectorsRequest termVectorsRequest, XContentParser parser) throws IOException {
XContentParser.Token token;
String currentFieldName = null;
List<String> fields = new ArrayList<>();
@ -490,35 +490,35 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
"The parameter fields must be given as an array! Use syntax : \"fields\" : [\"field1\", \"field2\",...]");
}
} else if (currentFieldName.equals("offsets")) {
termVectorRequest.offsets(parser.booleanValue());
termVectorsRequest.offsets(parser.booleanValue());
} else if (currentFieldName.equals("positions")) {
termVectorRequest.positions(parser.booleanValue());
termVectorsRequest.positions(parser.booleanValue());
} else if (currentFieldName.equals("payloads")) {
termVectorRequest.payloads(parser.booleanValue());
termVectorsRequest.payloads(parser.booleanValue());
} else if (currentFieldName.equals("term_statistics") || currentFieldName.equals("termStatistics")) {
termVectorRequest.termStatistics(parser.booleanValue());
termVectorsRequest.termStatistics(parser.booleanValue());
} else if (currentFieldName.equals("field_statistics") || currentFieldName.equals("fieldStatistics")) {
termVectorRequest.fieldStatistics(parser.booleanValue());
termVectorsRequest.fieldStatistics(parser.booleanValue());
} else if (currentFieldName.equals("dfs")) {
termVectorRequest.dfs(parser.booleanValue());
termVectorsRequest.dfs(parser.booleanValue());
} else if (currentFieldName.equals("per_field_analyzer") || currentFieldName.equals("perFieldAnalyzer")) {
termVectorRequest.perFieldAnalyzer(readPerFieldAnalyzer(parser.map()));
termVectorsRequest.perFieldAnalyzer(readPerFieldAnalyzer(parser.map()));
} else if ("_index".equals(currentFieldName)) { // the following is important for multi request parsing.
termVectorRequest.index = parser.text();
termVectorsRequest.index = parser.text();
} else if ("_type".equals(currentFieldName)) {
termVectorRequest.type = parser.text();
termVectorsRequest.type = parser.text();
} else if ("_id".equals(currentFieldName)) {
if (termVectorRequest.doc != null) {
if (termVectorsRequest.doc != null) {
throw new ElasticsearchParseException("Either \"id\" or \"doc\" can be specified, but not both!");
}
termVectorRequest.id = parser.text();
termVectorsRequest.id = parser.text();
} else if ("doc".equals(currentFieldName)) {
if (termVectorRequest.id != null) {
if (termVectorsRequest.id != null) {
throw new ElasticsearchParseException("Either \"id\" or \"doc\" can be specified, but not both!");
}
termVectorRequest.doc(jsonBuilder().copyCurrentStructure(parser));
termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
} else if ("_routing".equals(currentFieldName) || "routing".equals(currentFieldName)) {
termVectorRequest.routing = parser.text();
termVectorsRequest.routing = parser.text();
} else {
throw new ElasticsearchParseException("The parameter " + currentFieldName
+ " is not valid for term vector request!");
@ -527,7 +527,7 @@ public class TermVectorRequest extends SingleShardOperationRequest<TermVectorReq
}
if (fields.size() > 0) {
String[] fieldsAsArray = new String[fields.size()];
termVectorRequest.selectedFields(fields.toArray(fieldsAsArray));
termVectorsRequest.selectedFields(fields.toArray(fieldsAsArray));
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestBuilder;
@ -33,10 +33,10 @@ import java.util.Map;
* Note, the {@code index}, {@code type} and {@code id} are
* required.
*/
public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorRequest, TermVectorResponse, TermVectorRequestBuilder, Client> {
public class TermVectorsRequestBuilder extends ActionRequestBuilder<TermVectorsRequest, TermVectorsResponse, TermVectorsRequestBuilder, Client> {
public TermVectorRequestBuilder(Client client) {
super(client, new TermVectorRequest());
public TermVectorsRequestBuilder(Client client) {
super(client, new TermVectorsRequest());
}
/**
@ -44,14 +44,14 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
* from the provided index. Use {@code index}, {@code type} and
* {@code id} to specify the document to load.
*/
public TermVectorRequestBuilder(Client client, String index, String type, String id) {
super(client, new TermVectorRequest(index, type, id));
public TermVectorsRequestBuilder(Client client, String index, String type, String id) {
super(client, new TermVectorsRequest(index, type, id));
}
/**
* Sets the index where the document is located.
*/
public TermVectorRequestBuilder setIndex(String index) {
public TermVectorsRequestBuilder setIndex(String index) {
request.index(index);
return this;
}
@ -59,7 +59,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets the type of the document.
*/
public TermVectorRequestBuilder setType(String type) {
public TermVectorsRequestBuilder setType(String type) {
request.type(type);
return this;
}
@ -67,7 +67,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets the id of the document.
*/
public TermVectorRequestBuilder setId(String id) {
public TermVectorsRequestBuilder setId(String id) {
request.id(id);
return this;
}
@ -75,7 +75,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets the artificial document from which to generate term vectors.
*/
public TermVectorRequestBuilder setDoc(XContentBuilder xContent) {
public TermVectorsRequestBuilder setDoc(XContentBuilder xContent) {
request.doc(xContent);
return this;
}
@ -83,7 +83,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets the routing. Required if routing isn't id based.
*/
public TermVectorRequestBuilder setRouting(String routing) {
public TermVectorsRequestBuilder setRouting(String routing) {
request.routing(routing);
return this;
}
@ -92,7 +92,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
* Sets the parent id of this document. Will simply set the routing to this value, as it is only
* used for routing with delete requests.
*/
public TermVectorRequestBuilder setParent(String parent) {
public TermVectorsRequestBuilder setParent(String parent) {
request.parent(parent);
return this;
}
@ -102,7 +102,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
* <tt>_local</tt> to prefer local shards, <tt>_primary</tt> to execute only on primary shards, or
* a custom value, which guarantees that the same order will be used across different requests.
*/
public TermVectorRequestBuilder setPreference(String preference) {
public TermVectorsRequestBuilder setPreference(String preference) {
request.preference(preference);
return this;
}
@ -111,7 +111,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
* Sets whether to return the start and stop offsets for each term if they were stored or
* skip offsets.
*/
public TermVectorRequestBuilder setOffsets(boolean offsets) {
public TermVectorsRequestBuilder setOffsets(boolean offsets) {
request.offsets(offsets);
return this;
}
@ -120,7 +120,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets whether to return the positions for each term if stored or skip.
*/
public TermVectorRequestBuilder setPositions(boolean positions) {
public TermVectorsRequestBuilder setPositions(boolean positions) {
request.positions(positions);
return this;
}
@ -128,7 +128,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets whether to return the payloads for each term or skip.
*/
public TermVectorRequestBuilder setPayloads(boolean payloads) {
public TermVectorsRequestBuilder setPayloads(boolean payloads) {
request.payloads(payloads);
return this;
}
@ -136,7 +136,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets whether to return the term statistics for each term in the shard or skip.
*/
public TermVectorRequestBuilder setTermStatistics(boolean termStatistics) {
public TermVectorsRequestBuilder setTermStatistics(boolean termStatistics) {
request.termStatistics(termStatistics);
return this;
}
@ -144,7 +144,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets whether to return the field statistics for each term in the shard or skip.
*/
public TermVectorRequestBuilder setFieldStatistics(boolean fieldStatistics) {
public TermVectorsRequestBuilder setFieldStatistics(boolean fieldStatistics) {
request.fieldStatistics(fieldStatistics);
return this;
}
@ -152,7 +152,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets whether to use distributed frequencies instead of shard statistics.
*/
public TermVectorRequestBuilder setDfs(boolean dfs) {
public TermVectorsRequestBuilder setDfs(boolean dfs) {
request.dfs(dfs);
return this;
}
@ -161,7 +161,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
* Sets whether to return only term vectors for special selected fields. Returns the term
* vectors for all fields if selectedFields == null
*/
public TermVectorRequestBuilder setSelectedFields(String... fields) {
public TermVectorsRequestBuilder setSelectedFields(String... fields) {
request.selectedFields(fields);
return this;
}
@ -169,7 +169,7 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets whether term vectors are generated real-time.
*/
public TermVectorRequestBuilder setRealtime(Boolean realtime) {
public TermVectorsRequestBuilder setRealtime(Boolean realtime) {
request.realtime(realtime);
return this;
}
@ -177,13 +177,13 @@ public class TermVectorRequestBuilder extends ActionRequestBuilder<TermVectorReq
/**
* Sets the analyzer used at each field when generating term vectors.
*/
public TermVectorRequestBuilder setPerFieldAnalyzer(Map<String, String> perFieldAnalyzer) {
public TermVectorsRequestBuilder setPerFieldAnalyzer(Map<String, String> perFieldAnalyzer) {
request.perFieldAnalyzer(perFieldAnalyzer);
return this;
}
@Override
protected void doExecute(ActionListener<TermVectorResponse> listener) {
client.termVector(request, listener);
protected void doExecute(ActionListener<TermVectorsResponse> listener) {
client.termVectors(request, listener);
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.google.common.collect.Iterators;
import org.apache.lucene.index.DocsAndPositionsEnum;
@ -29,7 +29,7 @@ import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.action.ActionResponse;
import org.elasticsearch.action.termvector.TermVectorRequest.Flag;
import org.elasticsearch.action.termvectors.TermVectorsRequest.Flag;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
@ -46,7 +46,7 @@ import java.util.EnumSet;
import java.util.Iterator;
import java.util.Set;
public class TermVectorResponse extends ActionResponse implements ToXContent {
public class TermVectorsResponse extends ActionResponse implements ToXContent {
private static class FieldStrings {
// term statistics strings
@ -91,13 +91,13 @@ public class TermVectorResponse extends ActionResponse implements ToXContent {
int[] currentEndOffset = new int[0];
BytesReference[] currentPayloads = new BytesReference[0];
public TermVectorResponse(String index, String type, String id) {
public TermVectorsResponse(String index, String type, String id) {
this.index = index;
this.type = type;
this.id = id;
}
TermVectorResponse() {
TermVectorsResponse() {
}
@Override
@ -139,7 +139,7 @@ public class TermVectorResponse extends ActionResponse implements ToXContent {
headerRef = headerRef.copyBytesArray();
termVectors = termVectors.copyBytesArray();
}
return new TermVectorFields(headerRef, termVectors);
return new TermVectorsFields(headerRef, termVectors);
} else {
return new Fields() {
@Override
@ -326,7 +326,7 @@ public class TermVectorResponse extends ActionResponse implements ToXContent {
}
public void setFields(Fields termVectorsByField, Set<String> selectedFields, EnumSet<Flag> flags, Fields topLevelFields, @Nullable AggregatedDfs dfs) throws IOException {
TermVectorWriter tvw = new TermVectorWriter(this);
TermVectorsWriter tvw = new TermVectorsWriter(this);
if (termVectorsByField != null) {
tvw.setFields(termVectorsByField, selectedFields, flags, topLevelFields, dfs);
@ -334,7 +334,7 @@ public class TermVectorResponse extends ActionResponse implements ToXContent {
}
public void setTermVectorField(BytesStreamOutput output) {
public void setTermVectorsField(BytesStreamOutput output) {
termVectors = output.bytes();
}

View File

@ -16,13 +16,13 @@
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.apache.lucene.index.*;
import org.apache.lucene.search.CollectionStatistics;
import org.apache.lucene.search.TermStatistics;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.action.termvector.TermVectorRequest.Flag;
import org.elasticsearch.action.termvectors.TermVectorsRequest.Flag;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
@ -35,7 +35,7 @@ import java.util.List;
import java.util.Set;
// package only - this is an internal class!
final class TermVectorWriter {
final class TermVectorsWriter {
final List<String> fields = new ArrayList<>();
final List<Long> fieldOffset = new ArrayList<>();
final BytesStreamOutput output = new BytesStreamOutput(1); // can we somehow
@ -43,10 +43,10 @@ final class TermVectorWriter {
// size here?
private static final String HEADER = "TV";
private static final int CURRENT_VERSION = -1;
TermVectorResponse response = null;
TermVectorsResponse response = null;
TermVectorWriter(TermVectorResponse termVectorResponse) throws IOException {
response = termVectorResponse;
TermVectorsWriter(TermVectorsResponse termVectorsResponse) throws IOException {
response = termVectorsResponse;
}
void setFields(Fields termVectorsByField, Set<String> selectedFields, EnumSet<Flag> flags, Fields topLevelFields, @Nullable AggregatedDfs dfs) throws IOException {
@ -106,7 +106,7 @@ final class TermVectorWriter {
}
numFieldsWritten++;
}
response.setTermVectorField(output);
response.setTermVectorsField(output);
response.setHeader(writeHeader(numFieldsWritten, flags.contains(Flag.TermStatistics), flags.contains(Flag.FieldStatistics)));
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.ActionListener;
@ -41,11 +41,11 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
private final ClusterService clusterService;
private final TransportSingleShardMultiTermsVectorAction shardAction;
private final TransportShardMultiTermsVectorAction shardAction;
@Inject
public TransportMultiTermVectorsAction(Settings settings, ThreadPool threadPool, TransportService transportService,
ClusterService clusterService, TransportSingleShardMultiTermsVectorAction shardAction, ActionFilters actionFilters) {
ClusterService clusterService, TransportShardMultiTermsVectorAction shardAction, ActionFilters actionFilters) {
super(settings, MultiTermVectorsAction.NAME, threadPool, transportService, actionFilters);
this.clusterService = clusterService;
this.shardAction = shardAction;
@ -61,21 +61,21 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
Map<ShardId, MultiTermVectorsShardRequest> shardRequests = new HashMap<>();
for (int i = 0; i < request.requests.size(); i++) {
TermVectorRequest termVectorRequest = request.requests.get(i);
termVectorRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorRequest.routing(), termVectorRequest.index()));
if (!clusterState.metaData().hasConcreteIndex(termVectorRequest.index())) {
responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(termVectorRequest.index(),
termVectorRequest.type(), termVectorRequest.id(), "[" + termVectorRequest.index() + "] missing")));
TermVectorsRequest termVectorsRequest = request.requests.get(i);
termVectorsRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorsRequest.routing(), termVectorsRequest.index()));
if (!clusterState.metaData().hasConcreteIndex(termVectorsRequest.index())) {
responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(termVectorsRequest.index(),
termVectorsRequest.type(), termVectorsRequest.id(), "[" + termVectorsRequest.index() + "] missing")));
continue;
}
String concreteSingleIndex = clusterState.metaData().concreteSingleIndex(termVectorRequest.index(), termVectorRequest.indicesOptions());
if (termVectorRequest.routing() == null && clusterState.getMetaData().routingRequired(concreteSingleIndex, termVectorRequest.type())) {
responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(concreteSingleIndex, termVectorRequest.type(), termVectorRequest.id(),
"routing is required for [" + concreteSingleIndex + "]/[" + termVectorRequest.type() + "]/[" + termVectorRequest.id() + "]")));
String concreteSingleIndex = clusterState.metaData().concreteSingleIndex(termVectorsRequest.index(), termVectorsRequest.indicesOptions());
if (termVectorsRequest.routing() == null && clusterState.getMetaData().routingRequired(concreteSingleIndex, termVectorsRequest.type())) {
responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(concreteSingleIndex, termVectorsRequest.type(), termVectorsRequest.id(),
"routing is required for [" + concreteSingleIndex + "]/[" + termVectorsRequest.type() + "]/[" + termVectorsRequest.id() + "]")));
continue;
}
ShardId shardId = clusterService.operationRouting().getShards(clusterState, concreteSingleIndex,
termVectorRequest.type(), termVectorRequest.id(), termVectorRequest.routing(), null).shardId();
termVectorsRequest.type(), termVectorsRequest.id(), termVectorsRequest.routing(), null).shardId();
MultiTermVectorsShardRequest shardRequest = shardRequests.get(shardId);
if (shardRequest == null) {
shardRequest = new MultiTermVectorsShardRequest(request, shardId.index().name(), shardId.id());
@ -83,7 +83,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
shardRequests.put(shardId, shardRequest);
}
shardRequest.add(i, termVectorRequest);
shardRequest.add(i, termVectorsRequest);
}
if (shardRequests.size() == 0) {
@ -110,10 +110,10 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
// create failures for all relevant requests
String message = ExceptionsHelper.detailedMessage(e);
for (int i = 0; i < shardRequest.locations.size(); i++) {
TermVectorRequest termVectorRequest = shardRequest.requests.get(i);
TermVectorsRequest termVectorsRequest = shardRequest.requests.get(i);
responses.set(shardRequest.locations.get(i), new MultiTermVectorsItemResponse(null,
new MultiTermVectorsResponse.Failure(shardRequest.index(), termVectorRequest.type(),
termVectorRequest.id(), message)));
new MultiTermVectorsResponse.Failure(shardRequest.index(), termVectorsRequest.type(),
termVectorsRequest.id(), message)));
}
if (counter.decrementAndGet() == 0) {
finishHim();

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
@ -36,15 +36,15 @@ import org.elasticsearch.indices.IndicesService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
public class TransportSingleShardMultiTermsVectorAction extends TransportShardSingleOperationAction<MultiTermVectorsShardRequest, MultiTermVectorsShardResponse> {
public class TransportShardMultiTermsVectorAction extends TransportShardSingleOperationAction<MultiTermVectorsShardRequest, MultiTermVectorsShardResponse> {
private final IndicesService indicesService;
private static final String ACTION_NAME = MultiTermVectorsAction.NAME + "[shard]";
@Inject
public TransportSingleShardMultiTermsVectorAction(Settings settings, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ThreadPool threadPool, ActionFilters actionFilters) {
public TransportShardMultiTermsVectorAction(Settings settings, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ThreadPool threadPool, ActionFilters actionFilters) {
super(settings, ACTION_NAME, threadPool, clusterService, transportService, actionFilters);
this.indicesService = indicesService;
}
@ -84,19 +84,19 @@ public class TransportSingleShardMultiTermsVectorAction extends TransportShardSi
protected MultiTermVectorsShardResponse shardOperation(MultiTermVectorsShardRequest request, ShardId shardId) throws ElasticsearchException {
MultiTermVectorsShardResponse response = new MultiTermVectorsShardResponse();
for (int i = 0; i < request.locations.size(); i++) {
TermVectorRequest termVectorRequest = request.requests.get(i);
TermVectorsRequest termVectorsRequest = request.requests.get(i);
try {
IndexService indexService = indicesService.indexServiceSafe(request.index());
IndexShard indexShard = indexService.shardSafe(shardId.id());
TermVectorResponse termVectorResponse = indexShard.termVectorService().getTermVector(termVectorRequest, shardId.getIndex());
response.add(request.locations.get(i), termVectorResponse);
TermVectorsResponse termVectorsResponse = indexShard.termVectorsService().getTermVectors(termVectorsRequest, shardId.getIndex());
response.add(request.locations.get(i), termVectorsResponse);
} catch (Throwable t) {
if (TransportActions.isShardNotAvailableException(t)) {
throw (ElasticsearchException) t;
} else {
logger.debug("{} failed to execute multi term vectors for [{}]/[{}]", t, shardId, termVectorRequest.type(), termVectorRequest.id());
logger.debug("{} failed to execute multi term vectors for [{}]/[{}]", t, shardId, termVectorsRequest.type(), termVectorsRequest.id());
response.add(request.locations.get(i),
new MultiTermVectorsResponse.Failure(request.index(), termVectorRequest.type(), termVectorRequest.id(), ExceptionsHelper.detailedMessage(t)));
new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), ExceptionsHelper.detailedMessage(t)));
}
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.RoutingMissingException;
@ -38,14 +38,14 @@ import org.elasticsearch.transport.TransportService;
/**
* Performs the get operation.
*/
public class TransportSingleShardTermVectorAction extends TransportShardSingleOperationAction<TermVectorRequest, TermVectorResponse> {
public class TransportTermVectorsAction extends TransportShardSingleOperationAction<TermVectorsRequest, TermVectorsResponse> {
private final IndicesService indicesService;
@Inject
public TransportSingleShardTermVectorAction(Settings settings, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ThreadPool threadPool, ActionFilters actionFilters) {
super(settings, TermVectorAction.NAME, threadPool, clusterService, transportService, actionFilters);
public TransportTermVectorsAction(Settings settings, ClusterService clusterService, TransportService transportService,
IndicesService indicesService, ThreadPool threadPool, ActionFilters actionFilters) {
super(settings, TermVectorsAction.NAME, threadPool, clusterService, transportService, actionFilters);
this.indicesService = indicesService;
}
@ -77,19 +77,19 @@ public class TransportSingleShardTermVectorAction extends TransportShardSingleOp
}
@Override
protected TermVectorResponse shardOperation(TermVectorRequest request, ShardId shardId) throws ElasticsearchException {
protected TermVectorsResponse shardOperation(TermVectorsRequest request, ShardId shardId) throws ElasticsearchException {
IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex());
IndexShard indexShard = indexService.shardSafe(shardId.id());
return indexShard.termVectorService().getTermVector(request, shardId.getIndex());
return indexShard.termVectorsService().getTermVectors(request, shardId.getIndex());
}
@Override
protected TermVectorRequest newRequest() {
return new TermVectorRequest();
protected TermVectorsRequest newRequest() {
return new TermVectorsRequest();
}
@Override
protected TermVectorResponse newResponse() {
return new TermVectorResponse();
protected TermVectorsResponse newResponse() {
return new TermVectorsResponse();
}
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector.dfs;
package org.elasticsearch.action.termvectors.dfs;
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.Terms;
@ -48,17 +48,17 @@ public class DfsOnlyRequest extends BroadcastOperationRequest<DfsOnlyRequest> {
}
public DfsOnlyRequest(Fields termVectorFields, String[] indices, String[] types, Set<String> selectedFields) throws IOException {
public DfsOnlyRequest(Fields termVectorsFields, String[] indices, String[] types, Set<String> selectedFields) throws IOException {
super(indices);
// build a search request with a query of all the terms
final BoolQueryBuilder boolBuilder = boolQuery();
TermsEnum iterator = null;
for (String fieldName : termVectorFields) {
for (String fieldName : termVectorsFields) {
if ((selectedFields != null) && (!selectedFields.contains(fieldName))) {
continue;
}
Terms terms = termVectorFields.terms(fieldName);
Terms terms = termVectorsFields.terms(fieldName);
iterator = terms.iterator(iterator);
while (iterator.next() != null) {
String text = iterator.term().utf8ToString();

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector.dfs;
package org.elasticsearch.action.termvectors.dfs;
import org.elasticsearch.action.ShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastOperationResponse;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector.dfs;
package org.elasticsearch.action.termvectors.dfs;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationRequest;
import org.elasticsearch.cluster.routing.ShardRouting;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector.dfs;
package org.elasticsearch.action.termvectors.dfs;
import org.elasticsearch.action.support.broadcast.BroadcastShardOperationResponse;
import org.elasticsearch.common.io.stream.StreamInput;

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector.dfs;
package org.elasticsearch.action.termvectors.dfs;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.ActionListener;

View File

@ -20,4 +20,4 @@
/**
* Distributed frequencies.
*/
package org.elasticsearch.action.termvector.dfs;
package org.elasticsearch.action.termvectors.dfs;

View File

@ -20,4 +20,4 @@
/**
* Get the term vector for a specific document.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;

View File

@ -59,7 +59,7 @@ import org.elasticsearch.action.search.*;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.termvector.*;
import org.elasticsearch.action.termvectors.*;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
@ -541,7 +541,7 @@ public interface Client extends ElasticsearchClient<Client>, Releasable {
* @param request The term vector request
* @return The response future
*/
ActionFuture<TermVectorResponse> termVector(TermVectorRequest request);
ActionFuture<TermVectorsResponse> termVectors(TermVectorsRequest request);
/**
* An action that returns the term vectors for a specific document.
@ -549,12 +549,12 @@ public interface Client extends ElasticsearchClient<Client>, Releasable {
* @param request The term vector request
* @return The response future
*/
void termVector(TermVectorRequest request, ActionListener<TermVectorResponse> listener);
void termVectors(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener);
/**
* Builder for the term vector request.
*/
TermVectorRequestBuilder prepareTermVector();
TermVectorsRequestBuilder prepareTermVectors();
/**
* Builder for the term vector request.
@ -563,7 +563,41 @@ public interface Client extends ElasticsearchClient<Client>, Releasable {
* @param type The type of the document
* @param id The id of the document
*/
TermVectorRequestBuilder prepareTermVector(String index, String type, String id);
TermVectorsRequestBuilder prepareTermVectors(String index, String type, String id);
/**
* An action that returns the term vectors for a specific document.
*
* @param request The term vector request
* @return The response future
*/
@Deprecated
ActionFuture<TermVectorsResponse> termVector(TermVectorsRequest request);
/**
* An action that returns the term vectors for a specific document.
*
* @param request The term vector request
* @return The response future
*/
@Deprecated
void termVector(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener);
/**
* Builder for the term vector request.
*/
@Deprecated
TermVectorsRequestBuilder prepareTermVector();
/**
* Builder for the term vector request.
*
* @param index The index to load the document from
* @param type The type of the document
* @param id The id of the document
*/
@Deprecated
TermVectorsRequestBuilder prepareTermVector(String index, String type, String id);
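
A minimal sketch of the renamed builder entry points declared above (an existing `Client` instance named `client` is assumed, and the index, type and id values are placeholders used for illustration only):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.TermVectorsResponse;

// new, plural entry point
TermVectorsResponse response = client.prepareTermVectors("myindex", "mytype", "1")
        .setSelectedFields("text")
        .setOffsets(true)
        .setPositions(true)
        .setTermStatistics(true)
        .setFieldStatistics(true)
        .get();

// the singular builder still compiles but is deprecated and delegates to the plural one
TermVectorsResponse legacy = client.prepareTermVector("myindex", "mytype", "1").get();
--------------------------------------------------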
/**
* Multi get term vectors.

View File

@ -20,7 +20,6 @@
package org.elasticsearch.client.support;
import org.elasticsearch.action.*;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexAction;
import org.elasticsearch.action.bench.*;
import org.elasticsearch.action.bulk.BulkAction;
import org.elasticsearch.action.bulk.BulkRequest;
@ -72,7 +71,7 @@ import org.elasticsearch.action.suggest.SuggestAction;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestRequestBuilder;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.termvector.*;
import org.elasticsearch.action.termvectors.*;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateRequestBuilder;
@ -432,23 +431,47 @@ public abstract class AbstractClient implements Client {
}
@Override
public ActionFuture<TermVectorResponse> termVector(final TermVectorRequest request) {
return execute(TermVectorAction.INSTANCE, request);
public ActionFuture<TermVectorsResponse> termVectors(final TermVectorsRequest request) {
return execute(TermVectorsAction.INSTANCE, request);
}
@Override
public void termVector(final TermVectorRequest request, final ActionListener<TermVectorResponse> listener) {
execute(TermVectorAction.INSTANCE, request, listener);
public void termVectors(final TermVectorsRequest request, final ActionListener<TermVectorsResponse> listener) {
execute(TermVectorsAction.INSTANCE, request, listener);
}
@Override
public TermVectorRequestBuilder prepareTermVector() {
return new TermVectorRequestBuilder(this);
public TermVectorsRequestBuilder prepareTermVectors() {
return new TermVectorsRequestBuilder(this);
}
@Override
public TermVectorRequestBuilder prepareTermVector(String index, String type, String id) {
return new TermVectorRequestBuilder(this, index, type, id);
public TermVectorsRequestBuilder prepareTermVectors(String index, String type, String id) {
return new TermVectorsRequestBuilder(this, index, type, id);
}
@Deprecated
@Override
public ActionFuture<TermVectorsResponse> termVector(final TermVectorsRequest request) {
return termVectors(request);
}
@Deprecated
@Override
public void termVector(final TermVectorsRequest request, final ActionListener<TermVectorsResponse> listener) {
termVectors(request, listener);
}
@Deprecated
@Override
public TermVectorsRequestBuilder prepareTermVector() {
return prepareTermVectors();
}
@Deprecated
@Override
public TermVectorsRequestBuilder prepareTermVector(String index, String type, String id) {
return prepareTermVectors(index, type, id);
}
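
The request-based calls behave the same way. A short sketch (again assuming an existing `client` and placeholder document coordinates); both paths end up executing `TermVectorsAction`:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;

TermVectorsRequest request = new TermVectorsRequest("myindex", "mytype", "1");

// new name
TermVectorsResponse viaTermVectors = client.termVectors(request).actionGet();
// deprecated alias, forwards to termVectors(request) as shown in the delegation above
TermVectorsResponse viaTermVector = client.termVector(request).actionGet();
--------------------------------------------------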
@Override

View File

@ -48,10 +48,10 @@ import org.elasticsearch.action.percolate.PercolateResponse;
import org.elasticsearch.action.search.*;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.suggest.SuggestResponse;
import org.elasticsearch.action.termvector.MultiTermVectorsRequest;
import org.elasticsearch.action.termvector.MultiTermVectorsResponse;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cache.recycler.PageCacheRecycler;
@ -446,13 +446,13 @@ public class TransportClient extends AbstractClient {
}
@Override
public ActionFuture<TermVectorResponse> termVector(TermVectorRequest request) {
return internalClient.termVector(request);
public ActionFuture<TermVectorsResponse> termVectors(TermVectorsRequest request) {
return internalClient.termVectors(request);
}
@Override
public void termVector(TermVectorRequest request, ActionListener<TermVectorResponse> listener) {
internalClient.termVector(request, listener);
public void termVectors(TermVectorsRequest request, ActionListener<TermVectorsResponse> listener) {
internalClient.termVectors(request, listener);
}
@Override

View File

@ -22,7 +22,6 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefBuilder;
import org.elasticsearch.action.DocumentRequest;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.common.lucene.BytesRefs;
import java.util.Collection;

View File

@ -28,8 +28,8 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.termvector.MultiTermVectorsRequest;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
@ -166,7 +166,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
if (!token.isValue()) {
throw new ElasticsearchIllegalArgumentException("ids array element should only contain ids");
}
items.add(newTermVectorRequest().id(parser.text()));
items.add(newTermVectorsRequest().id(parser.text()));
}
} else if (Fields.DOCUMENTS.match(currentFieldName, parseContext.parseFlags())) {
while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
@ -229,7 +229,7 @@ public class MoreLikeThisQueryParser implements QueryParser {
// handle items
if (!items.isEmpty()) {
// set default index, type and fields if not specified
for (TermVectorRequest item : items) {
for (TermVectorsRequest item : items) {
if (item.index() == null) {
item.index(parseContext.index().name());
}
@ -267,10 +267,10 @@ public class MoreLikeThisQueryParser implements QueryParser {
return mltQuery;
}
private TermVectorRequest parseDocument(XContentParser parser) throws IOException {
TermVectorRequest termVectorRequest = newTermVectorRequest();
TermVectorRequest.parseRequest(termVectorRequest, parser);
return termVectorRequest;
private TermVectorsRequest parseDocument(XContentParser parser) throws IOException {
TermVectorsRequest termVectorsRequest = newTermVectorsRequest();
TermVectorsRequest.parseRequest(termVectorsRequest, parser);
return termVectorsRequest;
}
private void parseLikeField(XContentParser parser, List<String> likeTexts, MultiTermVectorsRequest items) throws IOException {
@ -283,8 +283,8 @@ public class MoreLikeThisQueryParser implements QueryParser {
}
}
private TermVectorRequest newTermVectorRequest() {
return new TermVectorRequest()
private TermVectorsRequest newTermVectorsRequest() {
return new TermVectorsRequest()
.positions(false)
.offsets(false)
.payloads(false)
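
For illustration only, this is roughly the shape of an item produced by `newTermVectorsRequest()` once the parser has filled in the document coordinates (the literal values are placeholders): like-items only need the terms themselves, so positions, offsets and payloads are switched off.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.TermVectorsRequest;

TermVectorsRequest item = new TermVectorsRequest()
        .positions(false)
        .offsets(false)
        .payloads(false);
item.index("myindex"); // defaults to the index of the query when not given
item.type("mytype");
item.id("1");
--------------------------------------------------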

View File

@ -20,11 +20,10 @@
package org.elasticsearch.index.search.morelikethis;
import org.apache.lucene.index.Fields;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.termvector.MultiTermVectorsItemResponse;
import org.elasticsearch.action.termvector.MultiTermVectorsRequest;
import org.elasticsearch.action.termvector.MultiTermVectorsResponse;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
@ -54,7 +53,7 @@ public class MoreLikeThisFetchService extends AbstractComponent {
if (response.isFailed()) {
continue;
}
TermVectorResponse getResponse = response.getResponse();
TermVectorsResponse getResponse = response.getResponse();
if (!getResponse.isExists()) {
continue;
}

View File

@ -73,7 +73,7 @@ import org.elasticsearch.index.store.IndexStore;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreModule;
import org.elasticsearch.index.suggest.SuggestShardModule;
import org.elasticsearch.index.termvectors.ShardTermVectorModule;
import org.elasticsearch.index.termvectors.ShardTermVectorsModule;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogModule;
import org.elasticsearch.index.translog.TranslogService;
@ -345,7 +345,7 @@ public class InternalIndexService extends AbstractIndexComponent implements Inde
modules.add(new EngineModule(indexSettings));
modules.add(new IndexShardGatewayModule(injector.getInstance(IndexGateway.class)));
modules.add(new PercolatorShardModule());
modules.add(new ShardTermVectorModule());
modules.add(new ShardTermVectorsModule());
modules.add(new IndexShardSnapshotModule());
modules.add(new SuggestShardModule());

View File

@ -55,7 +55,7 @@ import org.elasticsearch.index.shard.*;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.index.suggest.stats.ShardSuggestService;
import org.elasticsearch.index.suggest.stats.SuggestStats;
import org.elasticsearch.index.termvectors.ShardTermVectorService;
import org.elasticsearch.index.termvectors.ShardTermVectorsService;
import org.elasticsearch.index.translog.TranslogStats;
import org.elasticsearch.index.warmer.ShardIndexWarmerService;
import org.elasticsearch.index.warmer.WarmerStats;
@ -123,7 +123,7 @@ public interface IndexShard extends IndexShardComponent {
ShardPercolateService shardPercolateService();
ShardTermVectorService termVectorService();
ShardTermVectorsService termVectorsService();
ShardSuggestService shardSuggestService();

View File

@ -104,7 +104,7 @@ import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.store.StoreStats;
import org.elasticsearch.index.suggest.stats.ShardSuggestService;
import org.elasticsearch.index.suggest.stats.SuggestStats;
import org.elasticsearch.index.termvectors.ShardTermVectorService;
import org.elasticsearch.index.termvectors.ShardTermVectorsService;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogStats;
import org.elasticsearch.index.warmer.ShardIndexWarmerService;
@ -150,7 +150,7 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
private final PercolatorQueriesRegistry percolatorQueriesRegistry;
private final ShardPercolateService shardPercolateService;
private final CodecService codecService;
private final ShardTermVectorService termVectorService;
private final ShardTermVectorsService termVectorsService;
private final IndexFieldDataService indexFieldDataService;
private final IndexService indexService;
private final ShardSuggestService shardSuggestService;
@ -180,7 +180,7 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
public InternalIndexShard(ShardId shardId, @IndexSettings Settings indexSettings, IndexSettingsService indexSettingsService, IndicesLifecycle indicesLifecycle, Store store, Engine engine, MergeSchedulerProvider mergeScheduler, Translog translog,
ThreadPool threadPool, MapperService mapperService, IndexQueryParserService queryParserService, IndexCache indexCache, IndexAliasesService indexAliasesService, ShardIndexingService indexingService, ShardGetService getService, ShardSearchService searchService, ShardIndexWarmerService shardWarmerService,
ShardFilterCache shardFilterCache, ShardFieldData shardFieldData, PercolatorQueriesRegistry percolatorQueriesRegistry, ShardPercolateService shardPercolateService, CodecService codecService,
ShardTermVectorService termVectorService, IndexFieldDataService indexFieldDataService, IndexService indexService, ShardSuggestService shardSuggestService, ShardQueryCache shardQueryCache, ShardBitsetFilterCache shardBitsetFilterCache) {
ShardTermVectorsService termVectorsService, IndexFieldDataService indexFieldDataService, IndexService indexService, ShardSuggestService shardSuggestService, ShardQueryCache shardQueryCache, ShardBitsetFilterCache shardBitsetFilterCache) {
super(shardId, indexSettings);
this.indicesLifecycle = (InternalIndicesLifecycle) indicesLifecycle;
this.indexSettingsService = indexSettingsService;
@ -195,7 +195,7 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
this.indexAliasesService = indexAliasesService;
this.indexingService = indexingService;
this.getService = getService.setIndexShard(this);
this.termVectorService = termVectorService.setIndexShard(this);
this.termVectorsService = termVectorsService.setIndexShard(this);
this.searchService = searchService;
this.shardWarmerService = shardWarmerService;
this.shardFilterCache = shardFilterCache;
@ -246,8 +246,8 @@ public class InternalIndexShard extends AbstractIndexShardComponent implements I
}
@Override
public ShardTermVectorService termVectorService() {
return termVectorService;
public ShardTermVectorsService termVectorsService() {
return termVectorsService;
}
@Override

View File

@ -23,10 +23,10 @@ import org.elasticsearch.common.inject.AbstractModule;
/**
*
*/
public class ShardTermVectorModule extends AbstractModule {
public class ShardTermVectorsModule extends AbstractModule {
@Override
protected void configure() {
bind(ShardTermVectorService.class).asEagerSingleton();
bind(ShardTermVectorsService.class).asEagerSingleton();
}
}

View File

@ -23,11 +23,11 @@ import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.index.*;
import org.apache.lucene.index.memory.MemoryIndex;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.termvector.dfs.DfsOnlyRequest;
import org.elasticsearch.action.termvector.dfs.DfsOnlyResponse;
import org.elasticsearch.action.termvector.dfs.TransportDfsOnlyAction;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.termvectors.dfs.DfsOnlyRequest;
import org.elasticsearch.action.termvectors.dfs.DfsOnlyResponse;
import org.elasticsearch.action.termvectors.dfs.TransportDfsOnlyAction;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
@ -57,29 +57,29 @@ import static org.elasticsearch.index.mapper.SourceToParse.source;
/**
*/
public class ShardTermVectorService extends AbstractIndexShardComponent {
public class ShardTermVectorsService extends AbstractIndexShardComponent {
private IndexShard indexShard;
private final MappingUpdatedAction mappingUpdatedAction;
private final TransportDfsOnlyAction dfsAction;
@Inject
public ShardTermVectorService(ShardId shardId, @IndexSettings Settings indexSettings, MappingUpdatedAction mappingUpdatedAction, TransportDfsOnlyAction dfsAction) {
public ShardTermVectorsService(ShardId shardId, @IndexSettings Settings indexSettings, MappingUpdatedAction mappingUpdatedAction, TransportDfsOnlyAction dfsAction) {
super(shardId, indexSettings);
this.mappingUpdatedAction = mappingUpdatedAction;
this.dfsAction = dfsAction;
}
// sadly, to overcome cyclic dep, we need to do this and inject it ourselves...
public ShardTermVectorService setIndexShard(IndexShard indexShard) {
public ShardTermVectorsService setIndexShard(IndexShard indexShard) {
this.indexShard = indexShard;
return this;
}
public TermVectorResponse getTermVector(TermVectorRequest request, String concreteIndex) {
public TermVectorsResponse getTermVectors(TermVectorsRequest request, String concreteIndex) {
final Engine.Searcher searcher = indexShard.acquireSearcher("term_vector");
IndexReader topLevelReader = searcher.reader();
final TermVectorResponse termVectorResponse = new TermVectorResponse(concreteIndex, request.type(), request.id());
final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(concreteIndex, request.type(), request.id());
final Term uidTerm = new Term(UidFieldMapper.NAME, Uid.createUidAsBytes(request.type(), request.id()));
Engine.GetResult get = indexShard.get(new Engine.Get(request.realtime(), uidTerm));
@ -89,7 +89,7 @@ public class ShardTermVectorService extends AbstractIndexShardComponent {
/* fetched from translog is treated as an artificial document */
if (docFromTranslog) {
request.doc(get.source().source, false);
termVectorResponse.setDocVersion(get.version());
termVectorsResponse.setDocVersion(get.version());
}
/* handle potential wildcards in fields */
@ -110,9 +110,9 @@ public class ShardTermVectorService extends AbstractIndexShardComponent {
if (termVectorsByField != null && useDfs(request)) {
dfs = getAggregatedDfs(termVectorsByField, request);
}
termVectorResponse.setFields(termVectorsByField, request.selectedFields(), request.getFlags(), topLevelFields, dfs);
termVectorResponse.setExists(true);
termVectorResponse.setArtificial(!docFromTranslog);
termVectorsResponse.setFields(termVectorsByField, request.selectedFields(), request.getFlags(), topLevelFields, dfs);
termVectorsResponse.setExists(true);
termVectorsResponse.setArtificial(!docFromTranslog);
}
/* or from an existing document */
else if (docIdAndVersion != null) {
@ -130,11 +130,11 @@ public class ShardTermVectorService extends AbstractIndexShardComponent {
if (termVectorsByField != null && useDfs(request)) {
dfs = getAggregatedDfs(termVectorsByField, request);
}
termVectorResponse.setFields(termVectorsByField, request.selectedFields(), request.getFlags(), topLevelFields, dfs);
termVectorResponse.setDocVersion(docIdAndVersion.version);
termVectorResponse.setExists(true);
termVectorsResponse.setFields(termVectorsByField, request.selectedFields(), request.getFlags(), topLevelFields, dfs);
termVectorsResponse.setDocVersion(docIdAndVersion.version);
termVectorsResponse.setExists(true);
} else {
termVectorResponse.setExists(false);
termVectorsResponse.setExists(false);
}
} catch (Throwable ex) {
throw new ElasticsearchException("failed to execute term vector request", ex);
@ -142,10 +142,10 @@ public class ShardTermVectorService extends AbstractIndexShardComponent {
searcher.close();
get.release();
}
return termVectorResponse;
return termVectorsResponse;
}
private void handleFieldWildcards(TermVectorRequest request) {
private void handleFieldWildcards(TermVectorsRequest request) {
Set<String> fieldNames = new HashSet<>();
for (String pattern : request.selectedFields()) {
fieldNames.addAll(indexShard.mapperService().simpleMatchToIndexNames(pattern));
@ -165,7 +165,7 @@ public class ShardTermVectorService extends AbstractIndexShardComponent {
return true;
}
private Fields addGeneratedTermVectors(Engine.GetResult get, Fields termVectorsByField, TermVectorRequest request, Set<String> selectedFields) throws IOException {
private Fields addGeneratedTermVectors(Engine.GetResult get, Fields termVectorsByField, TermVectorsRequest request, Set<String> selectedFields) throws IOException {
/* only keep valid fields */
Set<String> validFields = new HashSet<>();
for (String field : selectedFields) {
@ -237,7 +237,7 @@ public class ShardTermVectorService extends AbstractIndexShardComponent {
return MultiFields.getFields(index.createSearcher().getIndexReader());
}
private Fields generateTermVectorsFromDoc(TermVectorRequest request, boolean doAllFields) throws IOException {
private Fields generateTermVectorsFromDoc(TermVectorsRequest request, boolean doAllFields) throws IOException {
// parse the document, at the moment we do update the mapping, just like percolate
ParsedDocument parsedDocument = parseDocument(indexShard.shardId().getIndex(), request.type(), request.doc());
@ -328,12 +328,12 @@ public class ShardTermVectorService extends AbstractIndexShardComponent {
}
}
private boolean useDfs(TermVectorRequest request) {
private boolean useDfs(TermVectorsRequest request) {
return request.dfs() && (request.fieldStatistics() || request.termStatistics());
}
private AggregatedDfs getAggregatedDfs(Fields termVectorFields, TermVectorRequest request) throws IOException {
DfsOnlyRequest dfsOnlyRequest = new DfsOnlyRequest(termVectorFields, new String[]{request.index()},
private AggregatedDfs getAggregatedDfs(Fields termVectorsFields, TermVectorsRequest request) throws IOException {
DfsOnlyRequest dfsOnlyRequest = new DfsOnlyRequest(termVectorsFields, new String[]{request.index()},
new String[]{request.type()}, request.selectedFields());
DfsOnlyResponse response = dfsAction.execute(dfsOnlyRequest).actionGet();
return response.getDfs();
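
A sketch of a request that actually exercises the dfs path above (placeholders for index, type and id; an existing `client` instance is assumed). As `useDfs()` shows, distributed frequencies are only computed when `dfs` is combined with term or field statistics:

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;

TermVectorsRequest request = new TermVectorsRequest("myindex", "mytype", "1")
        .selectedFields(new String[]{"text"})
        .termStatistics(true)
        .fieldStatistics(true)
        .dfs(true); // has no effect unless term or field statistics are requested

TermVectorsResponse response = client.termVectors(request).actionGet();
--------------------------------------------------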

View File

@ -107,8 +107,8 @@ import org.elasticsearch.rest.action.suggest.RestSuggestAction;
import org.elasticsearch.rest.action.template.RestDeleteSearchTemplateAction;
import org.elasticsearch.rest.action.template.RestGetSearchTemplateAction;
import org.elasticsearch.rest.action.template.RestPutSearchTemplateAction;
import org.elasticsearch.rest.action.termvector.RestMultiTermVectorsAction;
import org.elasticsearch.rest.action.termvector.RestTermVectorAction;
import org.elasticsearch.rest.action.termvectors.RestMultiTermVectorsAction;
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;
import org.elasticsearch.rest.action.update.RestUpdateAction;
import java.util.List;
@ -203,7 +203,7 @@ public class RestActionModule extends AbstractModule {
bind(RestDeleteByQueryAction.class).asEagerSingleton();
bind(org.elasticsearch.rest.action.count.RestCountAction.class).asEagerSingleton();
bind(RestSuggestAction.class).asEagerSingleton();
bind(RestTermVectorAction.class).asEagerSingleton();
bind(RestTermVectorsAction.class).asEagerSingleton();
bind(RestMultiTermVectorsAction.class).asEagerSingleton();
bind(RestBulkAction.class).asEagerSingleton();
bind(RestUpdateAction.class).asEagerSingleton();

View File

@ -17,11 +17,11 @@
* under the License.
*/
package org.elasticsearch.rest.action.termvector;
package org.elasticsearch.rest.action.termvectors;
import org.elasticsearch.action.termvector.MultiTermVectorsRequest;
import org.elasticsearch.action.termvector.MultiTermVectorsResponse;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
@ -50,10 +50,10 @@ public class RestMultiTermVectorsAction extends BaseRestHandler {
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws Exception {
MultiTermVectorsRequest multiTermVectorsRequest = new MultiTermVectorsRequest();
multiTermVectorsRequest.listenerThreaded(false);
TermVectorRequest template = new TermVectorRequest();
TermVectorsRequest template = new TermVectorsRequest();
template.index(request.param("index"));
template.type(request.param("type"));
RestTermVectorAction.readURIParameters(template, request);
RestTermVectorsAction.readURIParameters(template, request);
multiTermVectorsRequest.ids(Strings.commaDelimitedListToStringArray(request.param("ids")));
multiTermVectorsRequest.add(template, RestActions.getRestContent(request));

View File

@ -17,10 +17,10 @@
* under the License.
*/
package org.elasticsearch.rest.action.termvector;
package org.elasticsearch.rest.action.termvectors;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Inject;
@ -38,13 +38,19 @@ import static org.elasticsearch.rest.RestRequest.Method.POST;
/**
* This class parses the json request and translates it into a
* TermVectorRequest.
* TermVectorsRequest.
*/
public class RestTermVectorAction extends BaseRestHandler {
public class RestTermVectorsAction extends BaseRestHandler {
@Inject
public RestTermVectorAction(Settings settings, RestController controller, Client client) {
public RestTermVectorsAction(Settings settings, RestController controller, Client client) {
super(settings, controller, client);
controller.registerHandler(GET, "/{index}/{type}/_termvectors", this);
controller.registerHandler(POST, "/{index}/{type}/_termvectors", this);
controller.registerHandler(GET, "/{index}/{type}/{id}/_termvectors", this);
controller.registerHandler(POST, "/{index}/{type}/{id}/_termvectors", this);
// we keep usage of _termvector as an alias for now
controller.registerHandler(GET, "/{index}/{type}/_termvector", this);
controller.registerHandler(POST, "/{index}/{type}/_termvector", this);
controller.registerHandler(GET, "/{index}/{type}/{id}/_termvector", this);
@ -53,42 +59,42 @@ public class RestTermVectorAction extends BaseRestHandler {
@Override
public void handleRequest(final RestRequest request, final RestChannel channel, final Client client) throws Exception {
TermVectorRequest termVectorRequest = new TermVectorRequest(request.param("index"), request.param("type"), request.param("id"));
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("type"), request.param("id"));
XContentParser parser = null;
if (request.hasContent()) {
try {
parser = XContentFactory.xContent(request.content()).createParser(request.content());
TermVectorRequest.parseRequest(termVectorRequest, parser);
TermVectorsRequest.parseRequest(termVectorsRequest, parser);
} finally {
if (parser != null) {
parser.close();
}
}
}
readURIParameters(termVectorRequest, request);
readURIParameters(termVectorsRequest, request);
client.termVector(termVectorRequest, new RestToXContentListener<TermVectorResponse>(channel));
client.termVectors(termVectorsRequest, new RestToXContentListener<TermVectorsResponse>(channel));
}
static public void readURIParameters(TermVectorRequest termVectorRequest, RestRequest request) {
static public void readURIParameters(TermVectorsRequest termVectorsRequest, RestRequest request) {
String fields = request.param("fields");
addFieldStringsFromParameter(termVectorRequest, fields);
termVectorRequest.offsets(request.paramAsBoolean("offsets", termVectorRequest.offsets()));
termVectorRequest.positions(request.paramAsBoolean("positions", termVectorRequest.positions()));
termVectorRequest.payloads(request.paramAsBoolean("payloads", termVectorRequest.payloads()));
termVectorRequest.routing(request.param("routing"));
termVectorRequest.realtime(request.paramAsBoolean("realtime", null));
termVectorRequest.parent(request.param("parent"));
termVectorRequest.preference(request.param("preference"));
termVectorRequest.termStatistics(request.paramAsBoolean("termStatistics", termVectorRequest.termStatistics()));
termVectorRequest.termStatistics(request.paramAsBoolean("term_statistics", termVectorRequest.termStatistics()));
termVectorRequest.fieldStatistics(request.paramAsBoolean("fieldStatistics", termVectorRequest.fieldStatistics()));
termVectorRequest.fieldStatistics(request.paramAsBoolean("field_statistics", termVectorRequest.fieldStatistics()));
termVectorRequest.dfs(request.paramAsBoolean("dfs", termVectorRequest.dfs()));
addFieldStringsFromParameter(termVectorsRequest, fields);
termVectorsRequest.offsets(request.paramAsBoolean("offsets", termVectorsRequest.offsets()));
termVectorsRequest.positions(request.paramAsBoolean("positions", termVectorsRequest.positions()));
termVectorsRequest.payloads(request.paramAsBoolean("payloads", termVectorsRequest.payloads()));
termVectorsRequest.routing(request.param("routing"));
termVectorsRequest.realtime(request.paramAsBoolean("realtime", null));
termVectorsRequest.parent(request.param("parent"));
termVectorsRequest.preference(request.param("preference"));
termVectorsRequest.termStatistics(request.paramAsBoolean("termStatistics", termVectorsRequest.termStatistics()));
termVectorsRequest.termStatistics(request.paramAsBoolean("term_statistics", termVectorsRequest.termStatistics()));
termVectorsRequest.fieldStatistics(request.paramAsBoolean("fieldStatistics", termVectorsRequest.fieldStatistics()));
termVectorsRequest.fieldStatistics(request.paramAsBoolean("field_statistics", termVectorsRequest.fieldStatistics()));
termVectorsRequest.dfs(request.paramAsBoolean("dfs", termVectorsRequest.dfs()));
}
static public void addFieldStringsFromParameter(TermVectorRequest termVectorRequest, String fields) {
Set<String> selectedFields = termVectorRequest.selectedFields();
static public void addFieldStringsFromParameter(TermVectorsRequest termVectorsRequest, String fields) {
Set<String> selectedFields = termVectorsRequest.selectedFields();
if (fields != null) {
String[] paramFieldStrings = Strings.commaDelimitedListToStringArray(fields);
for (String field : paramFieldStrings) {
@ -102,7 +108,7 @@ public class RestTermVectorAction extends BaseRestHandler {
}
}
if (selectedFields != null) {
termVectorRequest.selectedFields(selectedFields.toArray(new String[selectedFields.size()]));
termVectorsRequest.selectedFields(selectedFields.toArray(new String[selectedFields.size()]));
}
}
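
The body-parsing step used by the handler can be sketched in isolation (the JSON body and document coordinates are arbitrary examples, and an enclosing method that declares the checked IOException is assumed):

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

BytesReference body = new BytesArray("{\"fields\" : [\"text\"], \"positions\" : true, \"offsets\" : true}");
TermVectorsRequest tvRequest = new TermVectorsRequest("myindex", "mytype", "1");
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(body);
try {
    TermVectorsRequest.parseRequest(tvRequest, parser); // merges the body into the request
} finally {
    parser.close();
}
--------------------------------------------------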

View File

@ -92,8 +92,8 @@ import org.elasticsearch.action.search.MultiSearchAction;
import org.elasticsearch.action.search.SearchAction;
import org.elasticsearch.action.search.SearchScrollAction;
import org.elasticsearch.action.suggest.SuggestAction;
import org.elasticsearch.action.termvector.MultiTermVectorsAction;
import org.elasticsearch.action.termvector.TermVectorAction;
import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
import org.elasticsearch.action.termvectors.TermVectorsAction;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
import org.elasticsearch.cluster.action.index.NodeIndexDeletedAction;
@ -266,7 +266,7 @@ final class ActionNames {
builder.put(SearchServiceTransportAction.SCAN_SCROLL_ACTION_NAME, "search/phase/scan/scroll");
addShardAction(SuggestAction.NAME, "suggest", builder);
addShardAction(TermVectorAction.NAME, "tv", builder);
addShardAction(TermVectorsAction.NAME, "tv", builder);
builder.put(BulkAction.NAME, "bulk");
builder.put(BulkAction.NAME + "[s]", "bulk/shard");

View File

@ -85,10 +85,10 @@ import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.suggest.SuggestAction;
import org.elasticsearch.action.suggest.SuggestRequest;
import org.elasticsearch.action.support.QuerySourceBuilder;
import org.elasticsearch.action.termvector.MultiTermVectorsAction;
import org.elasticsearch.action.termvector.MultiTermVectorsRequest;
import org.elasticsearch.action.termvector.TermVectorAction;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsAction;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsAction;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.action.update.UpdateResponse;
@ -327,14 +327,14 @@ public class IndicesRequestTests extends ElasticsearchIntegrationTest {
@Test
public void testTermVector() {
String termVectorShardAction = TermVectorAction.NAME + "[s]";
String termVectorShardAction = TermVectorsAction.NAME + "[s]";
interceptTransportActions(termVectorShardAction);
TermVectorRequest termVectorRequest = new TermVectorRequest(randomIndexOrAlias(), "type", "id");
internalCluster().clientNodeClient().termVector(termVectorRequest).actionGet();
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "type", "id");
internalCluster().clientNodeClient().termVectors(termVectorsRequest).actionGet();
clearInterceptedActions();
assertSameIndices(termVectorRequest, termVectorShardAction);
assertSameIndices(termVectorsRequest, termVectorShardAction);
}
@Test

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenFilter;
@ -36,7 +36,6 @@ import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.common.inject.internal.Join;
import org.elasticsearch.common.settings.ImmutableSettings;
@ -45,14 +44,13 @@ import org.elasticsearch.indices.IndexMissingException;
import org.elasticsearch.test.ElasticsearchIntegrationTest;
import java.io.IOException;
import java.io.Reader;
import java.util.*;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
public abstract class AbstractTermVectorTests extends ElasticsearchIntegrationTest {
public abstract class AbstractTermVectorsTests extends ElasticsearchIntegrationTest {
protected static class TestFieldSetting {
final public String name;
@ -326,7 +324,7 @@ public abstract class AbstractTermVectorTests extends ElasticsearchIntegrationTe
return DirectoryReader.open(dir);
}
protected void validateResponse(TermVectorResponse esResponse, Fields luceneFields, TestConfig testConfig) throws IOException {
protected void validateResponse(TermVectorsResponse esResponse, Fields luceneFields, TestConfig testConfig) throws IOException {
assertThat(esResponse.getIndex(), equalTo(testConfig.doc.index));
TestDoc testDoc = testConfig.doc;
HashSet<String> selectedFields = testConfig.selectedFields == null ? null : new HashSet<>(
@ -395,8 +393,8 @@ public abstract class AbstractTermVectorTests extends ElasticsearchIntegrationTe
}
}
protected TermVectorRequestBuilder getRequestForConfig(TestConfig config) {
return client().prepareTermVector(randomBoolean() ? config.doc.index : config.doc.alias, config.doc.type, config.doc.id).setPayloads(config.requestPayloads)
protected TermVectorsRequestBuilder getRequestForConfig(TestConfig config) {
return client().prepareTermVectors(randomBoolean() ? config.doc.index : config.doc.alias, config.doc.type, config.doc.id).setPayloads(config.requestPayloads)
.setOffsets(config.requestOffsets).setPositions(config.requestPositions).setFieldStatistics(true).setTermStatistics(true)
.setSelectedFields(config.selectedFields);
}

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.apache.lucene.index.DocsAndPositionsEnum;
import org.apache.lucene.index.Fields;
@ -40,7 +40,7 @@ import java.io.IOException;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.hamcrest.Matchers.equalTo;
public class GetTermVectorCheckDocFreqTests extends ElasticsearchIntegrationTest {
public class GetTermVectorsCheckDocFreqTests extends ElasticsearchIntegrationTest {
@Override
protected int numberOfShards() {
@ -97,9 +97,9 @@ public class GetTermVectorCheckDocFreqTests extends ElasticsearchIntegrationTest
private void checkWithoutFieldStatistics(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset,
int i) throws IOException {
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
.setPositions(true).setTermStatistics(true).setFieldStatistics(false).setSelectedFields();
TermVectorResponse response = resp.execute().actionGet();
TermVectorsResponse response = resp.execute().actionGet();
assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
@ -156,10 +156,10 @@ public class GetTermVectorCheckDocFreqTests extends ElasticsearchIntegrationTest
private void checkWithoutTermStatistics(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset,
int i) throws IOException {
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
.setPositions(true).setTermStatistics(false).setFieldStatistics(true).setSelectedFields();
assertThat(resp.request().termStatistics(), equalTo(false));
TermVectorResponse response = resp.execute().actionGet();
TermVectorsResponse response = resp.execute().actionGet();
assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
@ -213,10 +213,10 @@ public class GetTermVectorCheckDocFreqTests extends ElasticsearchIntegrationTest
private void checkAllInfo(int numDocs, String[] values, int[] freq, int[][] pos, int[][] startOffset, int[][] endOffset, int i)
throws IOException {
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i)).setPayloads(true).setOffsets(true)
.setPositions(true).setFieldStatistics(true).setTermStatistics(true).setSelectedFields();
assertThat(resp.request().fieldStatistics(), equalTo(true));
TermVectorResponse response = resp.execute().actionGet();
TermVectorsResponse response = resp.execute().actionGet();
assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import com.carrotsearch.hppc.ObjectIntOpenHashMap;
import org.apache.lucene.analysis.payloads.PayloadHelper;
@ -47,7 +47,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcke
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertThrows;
import static org.hamcrest.Matchers.*;
public class GetTermVectorTests extends AbstractTermVectorTests {
public class GetTermVectorsTests extends AbstractTermVectorsTests {
@Test
public void testNoSuchDoc() throws Exception {
@ -66,8 +66,8 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
client().prepareIndex("test", "type1", "666").setSource("field", "foo bar").execute().actionGet();
refresh();
for (int i = 0; i < 20; i++) {
ActionFuture<TermVectorResponse> termVector = client().termVector(new TermVectorRequest(indexOrAlias(), "type1", "" + i));
TermVectorResponse actionGet = termVector.actionGet();
ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "" + i));
TermVectorsResponse actionGet = termVector.actionGet();
assertThat(actionGet, notNullValue());
assertThat(actionGet.getIndex(), equalTo("test"));
assertThat(actionGet.isExists(), equalTo(false));
@ -93,11 +93,11 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
// when indexing a field that simply has a question mark, the term vectors will be null
client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet();
refresh();
ActionFuture<TermVectorResponse> termVector = client().termVector(new TermVectorRequest(indexOrAlias(), "type1", "0")
ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "0")
.selectedFields(new String[]{"existingfield"}));
// let's see if the null term vectors are caught...
TermVectorResponse actionGet = termVector.actionGet();
TermVectorsResponse actionGet = termVector.actionGet();
assertThat(actionGet, notNullValue());
assertThat(actionGet.isExists(), equalTo(true));
assertThat(actionGet.getIndex(), equalTo("test"));
@ -121,14 +121,14 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
// when indexing a field that simply has a question mark, the term vectors will be null
client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet();
refresh();
ActionFuture<TermVectorResponse> termVector = client().termVector(new TermVectorRequest(indexOrAlias(), "type1", "0")
ActionFuture<TermVectorsResponse> termVectors = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "0")
.selectedFields(randomBoolean() ? new String[]{"existingfield"} : null)
.termStatistics(true)
.fieldStatistics(true)
.dfs(true));
// let's see if the null term vectors are caught...
TermVectorResponse actionGet = termVector.actionGet();
TermVectorsResponse actionGet = termVectors.actionGet();
assertThat(actionGet, notNullValue());
assertThat(actionGet.isExists(), equalTo(true));
assertThat(actionGet.getIndex(), equalTo("test"));
@ -161,7 +161,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
indexRandom(true, indexBuilders);
for (int i = 0; i < 4; i++) {
TermVectorResponse resp = client().prepareTermVector(indexOrAlias(), "type1", String.valueOf(i))
TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
.setSelectedFields("field" + i)
.get();
assertThat(resp, notNullValue());
@ -171,7 +171,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
}
for (int i = 4; i < 6; i++) {
TermVectorResponse resp = client().prepareTermVector(indexOrAlias(), "type1", String.valueOf(i))
TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
.setSelectedFields("field" + i).get();
assertThat(resp.getIndex(), equalTo("test"));
assertThat("field" + i + " :", resp.getFields().terms("field" + i), notNullValue());
@ -205,9 +205,9 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
refresh();
}
for (int i = 0; i < 10; i++) {
TermVectorRequestBuilder resp = client().prepareTermVector(indexOrAlias(), "type1", Integer.toString(i)).setPayloads(true)
TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), "type1", Integer.toString(i)).setPayloads(true)
.setOffsets(true).setPositions(true).setSelectedFields();
TermVectorResponse response = resp.execute().actionGet();
TermVectorsResponse response = resp.execute().actionGet();
assertThat(response.getIndex(), equalTo("test"));
assertThat("doc id: " + i + " doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
@ -302,9 +302,9 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
boolean isPositionsRequested = randomBoolean();
String infoString = createInfoString(isPositionsRequested, isOffsetRequested, isPayloadRequested, optionString);
for (int i = 0; i < 10; i++) {
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(i))
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(i))
.setPayloads(isPayloadRequested).setOffsets(isOffsetRequested).setPositions(isPositionsRequested).setSelectedFields();
TermVectorResponse response = resp.execute().actionGet();
TermVectorsResponse response = resp.execute().actionGet();
assertThat(infoString + "doc id: " + i + " doesn't exist but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(ft.storeTermVectors() ? 1 : 0));
@ -389,13 +389,13 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
for (TestConfig test : testConfigs) {
try {
TermVectorRequestBuilder request = getRequestForConfig(test);
TermVectorsRequestBuilder request = getRequestForConfig(test);
if (test.expectedException != null) {
assertThrows(request, test.expectedException);
continue;
}
TermVectorResponse response = request.get();
TermVectorsResponse response = request.get();
Fields luceneTermVectors = getTermVectorsFromLucene(directoryReader, test.doc);
validateResponse(response, luceneTermVectors, test);
} catch (Throwable t) {
@ -439,9 +439,9 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
client().prepareIndex("test", "type1", Integer.toString(1))
.setSource(jsonBuilder().startObject().field("field", queryString).endObject()).execute().actionGet();
refresh();
TermVectorRequestBuilder resp = client().prepareTermVector("test", "type1", Integer.toString(1)).setPayloads(true).setOffsets(true)
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(1)).setPayloads(true).setOffsets(true)
.setPositions(true).setSelectedFields();
TermVectorResponse response = resp.execute().actionGet();
TermVectorsResponse response = resp.execute().actionGet();
assertThat("doc id 1 doesn't exists but should", response.isExists(), equalTo(true));
Fields fields = response.getFields();
assertThat(fields.size(), equalTo(1));
@ -609,7 +609,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
}
for (int i = 0; i < 10; i++) {
TermVectorResponse response = client().prepareTermVector("test", "type1", Integer.toString(i))
TermVectorsResponse response = client().prepareTermVectors("test", "type1", Integer.toString(i))
.setPayloads(true)
.setOffsets(true)
.setPositions(true)
@ -707,7 +707,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
Fields[] fields = new Fields[2];
int idx = 0;
for (String indexName : indexNames) {
TermVectorResponse resp = client().prepareTermVector(indexName, "type1", String.valueOf(i))
TermVectorsResponse resp = client().prepareTermVectors(indexName, "type1", String.valueOf(i))
.setOffsets(true)
.setPositions(true)
.setSelectedFields("field1")
@ -782,7 +782,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
client().prepareIndex("test", "type1", "0").setSource(source).get();
refresh();
TermVectorResponse response = client().prepareTermVector(indexOrAlias(), "type1", "0").setSelectedFields("field*").get();
TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0").setSelectedFields("field*").get();
assertThat("Doc doesn't exists but should", response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat("All term vectors should have been generated", response.getFields().size(), equalTo(numFields));
@ -818,7 +818,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
for (int i = 0; i < content.length; i++) {
// request tvs from existing document
TermVectorResponse respExisting = client().prepareTermVector("test", "type1", String.valueOf(i))
TermVectorsResponse respExisting = client().prepareTermVectors("test", "type1", String.valueOf(i))
.setOffsets(true)
.setPositions(true)
.setFieldStatistics(true)
@ -827,7 +827,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
assertThat("doc with index: test, type1 and id: existing", respExisting.isExists(), equalTo(true));
// request tvs from artificial document
TermVectorResponse respArtificial = client().prepareTermVector()
TermVectorsResponse respArtificial = client().prepareTermVectors()
.setIndex("test")
.setType("type1")
.setRouting(String.valueOf(i)) // ensure we get the stats from the same shard as existing doc
@ -860,7 +860,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
// request tvs from artificial document
String text = "the quick brown fox jumps over the lazy dog";
TermVectorResponse resp = client().prepareTermVector()
TermVectorsResponse resp = client().prepareTermVectors()
.setIndex("test")
.setType("type1")
.setDoc(jsonBuilder()
@ -905,7 +905,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
.endObject();
for (int i = 0; i < 2; i++) {
TermVectorResponse resp = client().prepareTermVector()
TermVectorsResponse resp = client().prepareTermVectors()
.setIndex("test")
.setType("type1")
.setDoc(doc)
@ -979,7 +979,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
}
// selected fields not specified
TermVectorResponse response = client().prepareTermVector(indexOrAlias(), "type1", "0")
TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0")
.setPerFieldAnalyzer(perFieldAnalyzer)
.get();
@ -987,7 +987,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
checkAnalyzedFields(response.getFields(), withTermVectors, perFieldAnalyzer);
// selected fields specified including some not in the mapping
response = client().prepareTermVector(indexOrAlias(), "type1", "0")
response = client().prepareTermVectors(indexOrAlias(), "type1", "0")
.setSelectedFields(selectedFields.toArray(Strings.EMPTY_ARRAY))
.setPerFieldAnalyzer(perFieldAnalyzer)
.get();
@ -1059,7 +1059,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
.endObject();
logger.info("Without dfs 'cat' should appear strictly less than {} times.", numDocs);
TermVectorResponse response = client().prepareTermVector("test", "type1", randomIntBetween(0, numDocs - 1) + "")
TermVectorsResponse response = client().prepareTermVectors("test", "type1", randomIntBetween(0, numDocs - 1) + "")
.setSelectedFields("text")
.setFieldStatistics(true)
.setTermStatistics(true)
@ -1067,7 +1067,7 @@ public class GetTermVectorTests extends AbstractTermVectorTests {
checkStats(response.getFields(), expectedStats, false);
logger.info("With dfs 'cat' should appear exactly {} times.", numDocs);
response = client().prepareTermVector("test", "type1", randomIntBetween(0, numDocs - 1) + "")
response = client().prepareTermVectors("test", "type1", randomIntBetween(0, numDocs - 1) + "")
.setSelectedFields("text")
.setFieldStatistics(true)
.setTermStatistics(true)

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.Fields;
@ -25,20 +25,20 @@ import org.junit.Test;
import static org.hamcrest.Matchers.equalTo;
public class MultiTermVectorsTests extends AbstractTermVectorTests {
public class MultiTermVectorsTests extends AbstractTermVectorsTests {
@Test
public void testDuelESLucene() throws Exception {
AbstractTermVectorTests.TestFieldSetting[] testFieldSettings = getFieldSettings();
AbstractTermVectorsTests.TestFieldSetting[] testFieldSettings = getFieldSettings();
createIndexBasedOnFieldSettings("test", "alias", testFieldSettings);
// we generate as many docs as we have shards
TestDoc[] testDocs = generateTestDocs("test", testFieldSettings);
DirectoryReader directoryReader = indexDocsWithLucene(testDocs);
AbstractTermVectorTests.TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);
AbstractTermVectorsTests.TestConfig[] testConfigs = generateTestConfigs(20, testDocs, testFieldSettings);
MultiTermVectorsRequestBuilder requestBuilder = client().prepareMultiTermVectors();
for (AbstractTermVectorTests.TestConfig test : testConfigs) {
for (AbstractTermVectorsTests.TestConfig test : testConfigs) {
requestBuilder.add(getRequestForConfig(test).request());
}
@ -65,7 +65,7 @@ public class MultiTermVectorsTests extends AbstractTermVectorTests {
@Test
public void testMissingIndexThrowsMissingIndex() throws Exception {
TermVectorRequestBuilder requestBuilder = client().prepareTermVector("testX", "typeX", Integer.toString(1));
TermVectorsRequestBuilder requestBuilder = client().prepareTermVectors("testX", "typeX", Integer.toString(1));
MultiTermVectorsRequestBuilder mtvBuilder = new MultiTermVectorsRequestBuilder(client());
mtvBuilder.add(requestBuilder.request());
MultiTermVectorsResponse response = mtvBuilder.execute().actionGet();

View File

@ -17,7 +17,7 @@
* under the License.
*/
package org.elasticsearch.action.termvector;
package org.elasticsearch.action.termvectors;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.*;
@ -28,7 +28,7 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.elasticsearch.action.termvector.TermVectorRequest.Flag;
import org.elasticsearch.action.termvectors.TermVectorsRequest.Flag;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.Streams;
@ -41,7 +41,7 @@ import org.elasticsearch.index.mapper.MapperParsingException;
import org.elasticsearch.index.mapper.core.AbstractFieldMapper;
import org.elasticsearch.index.mapper.core.TypeParsers;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.rest.action.termvector.RestTermVectorAction;
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;
import org.elasticsearch.test.ElasticsearchLuceneTestCase;
import org.hamcrest.Matchers;
import org.junit.Test;
@ -55,12 +55,12 @@ import java.util.Set;
import static org.hamcrest.Matchers.equalTo;
public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
public class TermVectorsUnitTests extends ElasticsearchLuceneTestCase {
@Test
public void streamResponse() throws Exception {
TermVectorResponse outResponse = new TermVectorResponse("a", "b", "c");
TermVectorsResponse outResponse = new TermVectorsResponse("a", "b", "c");
outResponse.setExists(true);
writeStandardTermVector(outResponse);
@ -72,13 +72,13 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
// read
ByteArrayInputStream esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
InputStreamStreamInput esBuffer = new InputStreamStreamInput(esInBuffer);
TermVectorResponse inResponse = new TermVectorResponse("a", "b", "c");
TermVectorsResponse inResponse = new TermVectorsResponse("a", "b", "c");
inResponse.readFrom(esBuffer);
// see if correct
checkIfStandardTermVector(inResponse);
outResponse = new TermVectorResponse("a", "b", "c");
outResponse = new TermVectorsResponse("a", "b", "c");
writeEmptyTermVector(outResponse);
// write
outBuffer = new ByteArrayOutputStream();
@ -88,13 +88,13 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
// read
esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
esBuffer = new InputStreamStreamInput(esInBuffer);
inResponse = new TermVectorResponse("a", "b", "c");
inResponse = new TermVectorsResponse("a", "b", "c");
inResponse.readFrom(esBuffer);
assertTrue(inResponse.isExists());
}
private void writeEmptyTermVector(TermVectorResponse outResponse) throws IOException {
private void writeEmptyTermVector(TermVectorsResponse outResponse) throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());
@ -126,7 +126,7 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
}
private void writeStandardTermVector(TermVectorResponse outResponse) throws IOException {
private void writeStandardTermVector(TermVectorsResponse outResponse) throws IOException {
Directory dir = newDirectory();
IndexWriterConfig conf = new IndexWriterConfig(new StandardAnalyzer());
@ -160,7 +160,7 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
}
private void checkIfStandardTermVector(TermVectorResponse inResponse) throws IOException {
private void checkIfStandardTermVector(TermVectorsResponse inResponse) throws IOException {
Fields fields = inResponse.getFields();
assertThat(fields.terms("title"), Matchers.notNullValue());
@ -173,9 +173,9 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
BytesReference inputBytes = new BytesArray(
" {\"fields\" : [\"a\", \"b\",\"c\"], \"offsets\":false, \"positions\":false, \"payloads\":true}");
TermVectorRequest tvr = new TermVectorRequest(null, null, null);
TermVectorsRequest tvr = new TermVectorsRequest(null, null, null);
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(inputBytes);
TermVectorRequest.parseRequest(tvr, parser);
TermVectorsRequest.parseRequest(tvr, parser);
Set<String> fields = tvr.selectedFields();
assertThat(fields.contains("a"), equalTo(true));
@ -185,23 +185,23 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
assertThat(tvr.positions(), equalTo(false));
assertThat(tvr.payloads(), equalTo(true));
String additionalFields = "b,c ,d, e ";
RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);
RestTermVectorsAction.addFieldStringsFromParameter(tvr, additionalFields);
assertThat(tvr.selectedFields().size(), equalTo(5));
assertThat(fields.contains("d"), equalTo(true));
assertThat(fields.contains("e"), equalTo(true));
additionalFields = "";
RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);
RestTermVectorsAction.addFieldStringsFromParameter(tvr, additionalFields);
inputBytes = new BytesArray(" {\"offsets\":false, \"positions\":false, \"payloads\":true}");
tvr = new TermVectorRequest(null, null, null);
tvr = new TermVectorsRequest(null, null, null);
parser = XContentFactory.xContent(XContentType.JSON).createParser(inputBytes);
TermVectorRequest.parseRequest(tvr, parser);
TermVectorsRequest.parseRequest(tvr, parser);
additionalFields = "";
RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);
RestTermVectorsAction.addFieldStringsFromParameter(tvr, additionalFields);
assertThat(tvr.selectedFields(), equalTo(null));
additionalFields = "b,c ,d, e ";
RestTermVectorAction.addFieldStringsFromParameter(tvr, additionalFields);
RestTermVectorsAction.addFieldStringsFromParameter(tvr, additionalFields);
assertThat(tvr.selectedFields().size(), equalTo(4));
}
@ -210,11 +210,11 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
public void testRequestParsingThrowsException() throws Exception {
BytesReference inputBytes = new BytesArray(
" {\"fields\" : \"a, b,c \", \"offsets\":false, \"positions\":false, \"payloads\":true, \"meaningless_term\":2}");
TermVectorRequest tvr = new TermVectorRequest(null, null, null);
TermVectorsRequest tvr = new TermVectorsRequest(null, null, null);
boolean threwException = false;
try {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(inputBytes);
TermVectorRequest.parseRequest(tvr, parser);
TermVectorsRequest.parseRequest(tvr, parser);
} catch (Exception e) {
threwException = true;
}
@ -226,7 +226,7 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
public void streamRequest() throws IOException {
for (int i = 0; i < 10; i++) {
TermVectorRequest request = new TermVectorRequest("index", "type", "id");
TermVectorsRequest request = new TermVectorsRequest("index", "type", "id");
request.offsets(random().nextBoolean());
request.fieldStatistics(random().nextBoolean());
request.payloads(random().nextBoolean());
@ -245,7 +245,7 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
// read
ByteArrayInputStream esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
InputStreamStreamInput esBuffer = new InputStreamStreamInput(esInBuffer);
TermVectorRequest req2 = new TermVectorRequest(null, null, null);
TermVectorsRequest req2 = new TermVectorsRequest(null, null, null);
req2.readFrom(esBuffer);
assertThat(request.offsets(), equalTo(req2.offsets()));
@ -291,16 +291,16 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
@Test
public void testMultiParser() throws Exception {
byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvector/multiRequest1.json");
byte[] data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest1.json");
BytesReference bytes = new BytesArray(data);
MultiTermVectorsRequest request = new MultiTermVectorsRequest();
request.add(new TermVectorRequest(), bytes);
request.add(new TermVectorsRequest(), bytes);
checkParsedParameters(request);
data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvector/multiRequest2.json");
data = Streams.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest2.json");
bytes = new BytesArray(data);
request = new MultiTermVectorsRequest();
request.add(new TermVectorRequest(), bytes);
request.add(new TermVectorsRequest(), bytes);
checkParsedParameters(request);
}
@ -312,7 +312,7 @@ public class TermVectorUnitTests extends ElasticsearchLuceneTestCase {
fields.add("a");
fields.add("b");
fields.add("c");
for (TermVectorRequest singleRequest : request.requests) {
for (TermVectorsRequest singleRequest : request.requests) {
assertThat(singleRequest.index(), equalTo("testidx"));
assertThat(singleRequest.type(), equalTo("test"));
assertThat(singleRequest.payloads(), equalTo(false));
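
For readers following the parsing changes in this test, a minimal sketch of parsing a request body with the renamed classes; the JSON body, index/type/id values, and the extra URL fields are illustrative assumptions, not code from this commit.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.action.termvectors.RestTermVectorsAction;

public class ParseTermVectorsRequestSketch {

    // Parses a JSON body into a TermVectorsRequest, then appends extra fields
    // from a comma-separated URL parameter, as the test above exercises.
    static TermVectorsRequest parse() throws Exception {
        BytesReference body = new BytesArray(
                "{\"fields\" : [\"text\"], \"offsets\" : true, \"positions\" : false}");
        TermVectorsRequest request = new TermVectorsRequest("twitter", "tweet", "1");
        XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(body);
        TermVectorsRequest.parseRequest(request, parser);
        RestTermVectorsAction.addFieldStringsFromParameter(request, "fullname, text");
        return request;
    }
}
--------------------------------------------------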

View File

@ -40,7 +40,7 @@ import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateRequestBuilder;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.cluster.ClusterState;
@ -49,7 +49,6 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.ImmutableSettings;
@ -666,10 +665,10 @@ public class BasicBackwardsCompatibilityTest extends ElasticsearchBackwardsCompa
.setSource("field", "the quick brown fox jumps over the lazy dog").get();
refresh();
TermVectorResponse termVectorResponse = client().prepareTermVector(indexOrAlias(), "type1", "1").get();
assertThat(termVectorResponse.getIndex(), equalTo("test"));
assertThat(termVectorResponse.isExists(), equalTo(true));
Fields fields = termVectorResponse.getFields();
TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "type1", "1").get();
assertThat(termVectorsResponse.getIndex(), equalTo("test"));
assertThat(termVectorsResponse.isExists(), equalTo(true));
Fields fields = termVectorsResponse.getFields();
assertThat(fields.size(), equalTo(1));
assertThat(fields.terms("field").size(), equalTo(8l));
}
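
To make the renamed single-document API in this test easier to follow, a minimal sketch of fetching the term vectors of one document; the index, type, and id are placeholders, and `client` is assumed to be an already-connected Client.

[source,java]
--------------------------------------------------
import org.apache.lucene.index.Fields;
import org.apache.lucene.index.Terms;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.client.Client;

public class SingleTermVectorsSketch {

    // Fetches the term vectors of one document and returns the terms of the
    // "field" field, mirroring the prepareTermVectors().get() call above.
    static Terms termsOfField(Client client) throws Exception {
        TermVectorsResponse response = client.prepareTermVectors("test", "type1", "1").get();
        if (!response.isExists()) {
            return null; // document (or its stored term vectors) not found
        }
        Fields fields = response.getFields();
        return fields.terms("field");
    }
}
--------------------------------------------------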

View File

@ -69,8 +69,8 @@ import org.apache.lucene.util.NumericUtils;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalArgumentException;
import org.elasticsearch.action.termvector.MultiTermVectorsRequest;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.compress.CompressedString;
@ -1847,7 +1847,7 @@ public class SimpleIndexQueryParserTests extends ElasticsearchSingleNodeTest {
@Override
public Fields[] fetch(MultiTermVectorsRequest items) throws IOException {
List<Fields> likeTexts = new ArrayList<>();
for (TermVectorRequest item : items) {
for (TermVectorsRequest item : items) {
likeTexts.add(generateFields(item.selectedFields().toArray(Strings.EMPTY_ARRAY), item.id()));
}
return likeTexts.toArray(Fields.EMPTY_ARRAY);
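
A small sketch of the iteration pattern shown in this hunk: walking the per-document TermVectorsRequest items inside a MultiTermVectorsRequest. Collecting the document ids is only an illustration, not part of the commit.

[source,java]
--------------------------------------------------
import java.util.ArrayList;
import java.util.List;

import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsRequest;

public class MultiTermVectorsItemsSketch {

    // A MultiTermVectorsRequest is iterable over its per-document
    // TermVectorsRequest items; here we simply collect each item's id.
    static List<String> ids(MultiTermVectorsRequest items) {
        List<String> ids = new ArrayList<>();
        for (TermVectorsRequest item : items) {
            ids.add(item.id());
        }
        return ids;
    }
}
--------------------------------------------------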

View File

@ -25,9 +25,9 @@ import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.explain.ExplainResponse;
import org.elasticsearch.action.get.MultiGetRequest;
import org.elasticsearch.action.get.MultiGetResponse;
import org.elasticsearch.action.termvector.MultiTermVectorsResponse;
import org.elasticsearch.action.termvector.TermVectorRequest;
import org.elasticsearch.action.termvector.TermVectorResponse;
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -372,12 +372,12 @@ public class SimpleRoutingTests extends ElasticsearchIntegrationTest {
}
logger.info("--> verifying term vector with id [1], with routing [0], should succeed");
TermVectorResponse termVectorResponse = client().prepareTermVector(indexOrAlias(), "type1", "1").setRouting("0").get();
assertThat(termVectorResponse.isExists(), equalTo(true));
assertThat(termVectorResponse.getId(), equalTo("1"));
TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "type1", "1").setRouting("0").get();
assertThat(termVectorsResponse.isExists(), equalTo(true));
assertThat(termVectorsResponse.getId(), equalTo("1"));
try {
client().prepareTermVector(indexOrAlias(), "type1", "1").get();
client().prepareTermVectors(indexOrAlias(), "type1", "1").get();
fail();
} catch (RoutingMissingException e) {
assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]"));
@ -418,8 +418,8 @@ public class SimpleRoutingTests extends ElasticsearchIntegrationTest {
assertThat(multiGetResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[type1]/[2]"));
MultiTermVectorsResponse multiTermVectorsResponse = client().prepareMultiTermVectors()
.add(new TermVectorRequest(indexOrAlias(), "type1", "1").routing("0"))
.add(new TermVectorRequest(indexOrAlias(), "type1", "2").routing("0")).get();
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").routing("0"))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").routing("0")).get();
assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2));
assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1"));
assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(false));
@ -431,8 +431,8 @@ public class SimpleRoutingTests extends ElasticsearchIntegrationTest {
assertThat(multiTermVectorsResponse.getResponses()[1].getResponse().isExists(), equalTo(true));
multiTermVectorsResponse = client().prepareMultiTermVectors()
.add(new TermVectorRequest(indexOrAlias(), "type1", "1"))
.add(new TermVectorRequest(indexOrAlias(), "type1", "2")).get();
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1"))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2")).get();
assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2));
assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1"));
assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(true));
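
Finally, a minimal sketch of routing-aware term vectors requests with the renamed classes, summarizing the calls exercised in this test; the routing value, index name, and ids are placeholders.

[source,java]
--------------------------------------------------
import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.client.Client;

public class RoutedTermVectorsSketch {

    // Single-document request with an explicit routing value.
    static TermVectorsResponse single(Client client) {
        return client.prepareTermVectors("test", "type1", "1").setRouting("0").get();
    }

    // Multi request: each TermVectorsRequest carries its own routing value.
    static MultiTermVectorsResponse multi(Client client) {
        return client.prepareMultiTermVectors()
                .add(new TermVectorsRequest("test", "type1", "1").routing("0"))
                .add(new TermVectorsRequest("test", "type1", "2").routing("0"))
                .get();
    }
}
--------------------------------------------------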

View File

@ -28,7 +28,7 @@ import org.elasticsearch.action.bench.BenchmarkAction;
import org.elasticsearch.action.bench.BenchmarkService;
import org.elasticsearch.action.bench.BenchmarkStatusAction;
import org.elasticsearch.action.exists.ExistsAction;
import org.elasticsearch.action.termvector.dfs.TransportDfsOnlyAction;
import org.elasticsearch.action.termvectors.dfs.TransportDfsOnlyAction;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.repositories.VerifyNodeRepositoryAction;
import org.elasticsearch.test.ElasticsearchIntegrationTest;