[Remove] Type parameter from TermVectors API (#2104)

Remove document type support from the TermVectors API.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
This commit is contained in:
Nick Knize 2022-02-15 14:49:08 -06:00 committed by GitHub
parent 62361ceafc
commit c0c7d7aedb
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
41 changed files with 237 additions and 597 deletions

View File

@@ -46,24 +46,14 @@ import java.util.Objects;
public class TermVectorsResponse {
private final String index;
private final String type;
private final String id;
private final long docVersion;
private final boolean found;
private final long tookInMillis;
private final List<TermVector> termVectorList;
public TermVectorsResponse(
String index,
String type,
String id,
long version,
boolean found,
long tookInMillis,
List<TermVector> termVectorList
) {
public TermVectorsResponse(String index, String id, long version, boolean found, long tookInMillis, List<TermVector> termVectorList) {
this.index = index;
this.type = type;
this.id = id;
this.docVersion = version;
this.found = found;
@@ -75,19 +65,18 @@ public class TermVectorsResponse {
"term_vectors",
true,
args -> {
// as the response comes from server, we are sure that args[6] will be a list of TermVector
// as the response comes from server, we are sure that args[5] will be a list of TermVector
@SuppressWarnings("unchecked")
List<TermVector> termVectorList = (List<TermVector>) args[6];
List<TermVector> termVectorList = (List<TermVector>) args[5];
if (termVectorList != null) {
Collections.sort(termVectorList, Comparator.comparing(TermVector::getFieldName));
}
return new TermVectorsResponse(
(String) args[0],
(String) args[1],
(String) args[2],
(long) args[3],
(boolean) args[4],
(long) args[5],
(long) args[2],
(boolean) args[3],
(long) args[4],
termVectorList
);
}
@@ -95,7 +84,6 @@ public class TermVectorsResponse {
static {
PARSER.declareString(constructorArg(), new ParseField("_index"));
PARSER.declareString(constructorArg(), new ParseField("_type"));
PARSER.declareString(optionalConstructorArg(), new ParseField("_id"));
PARSER.declareLong(constructorArg(), new ParseField("_version"));
PARSER.declareBoolean(constructorArg(), new ParseField("found"));
@@ -118,16 +106,6 @@ public class TermVectorsResponse {
return index;
}
/**
* Returns the type for the response
*
* @deprecated Types are in the process of being removed.
*/
@Deprecated
public String getType() {
return type;
}
/**
* Returns the id of the request
* can be NULL if there is no document ID
@@ -171,7 +149,6 @@ public class TermVectorsResponse {
if (!(obj instanceof TermVectorsResponse)) return false;
TermVectorsResponse other = (TermVectorsResponse) obj;
return index.equals(other.index)
&& type.equals(other.type)
&& Objects.equals(id, other.id)
&& docVersion == other.docVersion
&& found == other.found
@@ -181,7 +158,7 @@ public class TermVectorsResponse {
@Override
public int hashCode() {
return Objects.hash(index, type, id, docVersion, found, tookInMillis, termVectorList);
return Objects.hash(index, id, docVersion, found, tookInMillis, termVectorList);
}
public static final class TermVector {

View File

@@ -59,7 +59,6 @@ public class TermVectorsResponseTests extends OpenSearchTestCase {
static void toXContent(TermVectorsResponse response, XContentBuilder builder) throws IOException {
builder.startObject();
builder.field("_index", response.getIndex());
builder.field("_type", response.getType());
if (response.getId() != null) {
builder.field("_id", response.getId());
}
@@ -130,7 +129,6 @@ public class TermVectorsResponseTests extends OpenSearchTestCase {
static TermVectorsResponse createTestInstance() {
String index = randomAlphaOfLength(5);
String type = randomAlphaOfLength(5);
String id = String.valueOf(randomIntBetween(1, 100));
long version = randomNonNegativeLong();
long tookInMillis = randomNonNegativeLong();
@@ -154,7 +152,7 @@ public class TermVectorsResponseTests extends OpenSearchTestCase {
);
}
}
TermVectorsResponse tvresponse = new TermVectorsResponse(index, type, id, version, found, tookInMillis, tvList);
TermVectorsResponse tvresponse = new TermVectorsResponse(index, id, version, found, tookInMillis, tvList);
return tvresponse;
}

View File

@@ -1719,9 +1719,8 @@ public class CRUDDocumentationIT extends OpenSearchRestHighLevelClientTestCase {
// tag::term-vectors-response
String index = response.getIndex(); // <1>
String type = response.getType(); // <2>
String id = response.getId(); // <3>
boolean found = response.getFound(); // <4>
String id = response.getId(); // <2>
boolean found = response.getFound(); // <3>
// end::term-vectors-response
if (response.getTermVectorsList() != null) {

View File

@@ -41,12 +41,10 @@
like:
-
_index: test_1
_type: _doc
doc:
foo: bar
-
_index: test_1
_type: _doc
_id: 2
-
_id: 3

View File

@@ -40,11 +40,9 @@
more_like_this:
like:
_index: test_1
_type: _doc
_id: 1
unlike:
_index: test_1
_type: _doc
_id: 3
include: true
min_doc_freq: 0

View File

@@ -47,7 +47,6 @@ setup:
"docs":
-
"_index" : "testidx"
"_type" : "_doc"
"_id" : "testing_document"
"version" : 1
"_version_type" : "external"

View File

@@ -39,6 +39,5 @@ setup:
realtime: false
- match: { _index: "testidx" }
- match: { _type: "_doc" }
- match: { _id: "1" }
- is_false: found

View File

@@ -360,7 +360,7 @@ public class IndicesRequestIT extends OpenSearchIntegTestCase {
String termVectorShardAction = TermVectorsAction.NAME + "[s]";
interceptTransportActions(termVectorShardAction);
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "type", "id");
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(randomIndexOrAlias(), "id");
internalCluster().coordOnlyNodeClient().termVectors(termVectorsRequest).actionGet();
clearInterceptedActions();
@@ -376,7 +376,7 @@ public class IndicesRequestIT extends OpenSearchIntegTestCase {
int numDocs = iterations(1, 30);
for (int i = 0; i < numDocs; i++) {
String indexOrAlias = randomIndexOrAlias();
multiTermVectorsRequest.add(indexOrAlias, "type", Integer.toString(i));
multiTermVectorsRequest.add(indexOrAlias, Integer.toString(i));
indices.add(indexOrAlias);
}
internalCluster().coordOnlyNodeClient().multiTermVectors(multiTermVectorsRequest).actionGet();

View File

@@ -95,7 +95,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
client().prepareIndex("test", "type1", "666").setSource("field", "foo bar").execute().actionGet();
refresh();
for (int i = 0; i < 20; i++) {
ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "type1", "" + i));
ActionFuture<TermVectorsResponse> termVector = client().termVectors(new TermVectorsRequest(indexOrAlias(), "" + i));
TermVectorsResponse actionGet = termVector.actionGet();
assertThat(actionGet, notNullValue());
assertThat(actionGet.getIndex(), equalTo("test"));
@@ -122,7 +122,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
client().prepareIndex("test", "type1", "0").setSource("existingfield", "?").execute().actionGet();
refresh();
ActionFuture<TermVectorsResponse> termVector = client().termVectors(
new TermVectorsRequest(indexOrAlias(), "type1", "0").selectedFields(new String[] { "existingfield" })
new TermVectorsRequest(indexOrAlias(), "0").selectedFields(new String[] { "existingfield" })
);
// lets see if the null term vectors are caught...
@@ -150,7 +150,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
client().prepareIndex("test", "type1", "0").setSource("anotherexistingfield", 1).execute().actionGet();
refresh();
ActionFuture<TermVectorsResponse> termVectors = client().termVectors(
new TermVectorsRequest(indexOrAlias(), "type1", "0").selectedFields(randomBoolean() ? new String[] { "existingfield" } : null)
new TermVectorsRequest(indexOrAlias(), "0").selectedFields(randomBoolean() ? new String[] { "existingfield" } : null)
.termStatistics(true)
.fieldStatistics(true)
);
@@ -191,9 +191,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
indexRandom(true, indexBuilders);
for (int i = 0; i < 4; i++) {
TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
.setSelectedFields("field" + i)
.get();
TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), String.valueOf(i)).setSelectedFields("field" + i).get();
assertThat(resp, notNullValue());
assertThat(resp.isExists(), equalTo(true));
assertThat(resp.getIndex(), equalTo("test"));
@@ -201,9 +199,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
}
for (int i = 4; i < 6; i++) {
TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), "type1", String.valueOf(i))
.setSelectedFields("field" + i)
.get();
TermVectorsResponse resp = client().prepareTermVectors(indexOrAlias(), String.valueOf(i)).setSelectedFields("field" + i).get();
assertThat(resp.getIndex(), equalTo("test"));
assertThat("field" + i + " :", resp.getFields().terms("field" + i), notNullValue());
}
@@ -245,7 +241,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
refresh();
}
for (int i = 0; i < 10; i++) {
TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), "type1", Integer.toString(i))
TermVectorsRequestBuilder resp = client().prepareTermVectors(indexOrAlias(), Integer.toString(i))
.setPayloads(true)
.setOffsets(true)
.setPositions(true)
@@ -362,7 +358,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
boolean isPositionsRequested = randomBoolean();
String infoString = createInfoString(isPositionsRequested, isOffsetRequested, optionString);
for (int i = 0; i < 10; i++) {
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "_doc", Integer.toString(i))
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", Integer.toString(i))
.setOffsets(isOffsetRequested)
.setPositions(isPositionsRequested)
.setSelectedFields();
@@ -501,7 +497,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
}
for (int i = 0; i < 10; i++) {
TermVectorsResponse response = client().prepareTermVectors("test", "type1", Integer.toString(i))
TermVectorsResponse response = client().prepareTermVectors("test", Integer.toString(i))
.setPayloads(true)
.setOffsets(true)
.setPositions(true)
@@ -590,7 +586,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
for (int id = 0; id < content.length; id++) {
Fields[] fields = new Fields[2];
for (int j = 0; j < indexNames.length; j++) {
TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], "type1", String.valueOf(id))
TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], String.valueOf(id))
.setOffsets(true)
.setPositions(true)
.setSelectedFields("field1")
@@ -661,7 +657,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
client().prepareIndex("test", "type1", "0").setSource(source).get();
refresh();
TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0").setSelectedFields("field*").get();
TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "0").setSelectedFields("field*").get();
assertThat("Doc doesn't exists but should", response.isExists(), equalTo(true));
assertThat(response.getIndex(), equalTo("test"));
assertThat("All term vectors should have been generated", response.getFields().size(), equalTo(numFields));
@@ -692,7 +688,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
for (int i = 0; i < content.length; i++) {
// request tvs from existing document
TermVectorsResponse respExisting = client().prepareTermVectors("test", "type1", String.valueOf(i))
TermVectorsResponse respExisting = client().prepareTermVectors("test", String.valueOf(i))
.setOffsets(true)
.setPositions(true)
.setFieldStatistics(true)
@@ -703,7 +699,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
// request tvs from artificial document
TermVectorsResponse respArtificial = client().prepareTermVectors()
.setIndex("test")
.setType("type1")
.setRouting(String.valueOf(i)) // ensure we get the stats from the same shard as existing doc
.setDoc(jsonBuilder().startObject().field("field1", content[i]).endObject())
.setOffsets(true)
@@ -728,7 +723,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
String text = "the quick brown fox jumps over the lazy dog";
TermVectorsResponse resp = client().prepareTermVectors()
.setIndex("test")
.setType("type1")
.setDoc(jsonBuilder().startObject().field("field1", text).endObject())
.setOffsets(true)
.setPositions(true)
@@ -798,15 +792,13 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
}
// selected fields not specified
TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "type1", "0")
.setPerFieldAnalyzer(perFieldAnalyzer)
.get();
TermVectorsResponse response = client().prepareTermVectors(indexOrAlias(), "0").setPerFieldAnalyzer(perFieldAnalyzer).get();
// should return all fields that have terms vectors, some with overridden analyzer
checkAnalyzedFields(response.getFields(), withTermVectors, perFieldAnalyzer);
// selected fields specified including some not in the mapping
response = client().prepareTermVectors(indexOrAlias(), "type1", "0")
response = client().prepareTermVectors(indexOrAlias(), "0")
.setSelectedFields(selectedFields.toArray(Strings.EMPTY_ARRAY))
.setPerFieldAnalyzer(perFieldAnalyzer)
.get();
@@ -848,7 +840,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
ensureGreen();
TermVectorsResponse response = client().prepareTermVectors("test", "type1", "1").get();
TermVectorsResponse response = client().prepareTermVectors("test", "1").get();
assertThat(response.isExists(), equalTo(false));
logger.info("--> index doc 1");
@@ -857,12 +849,12 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
// From translog:
// version 0 means ignore version, which is the default
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getVersion(), equalTo(1L));
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(1).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getVersion(), equalTo(1L));
@@ -878,13 +870,13 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
refresh();
// version 0 means ignore version, which is the default
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
assertThat(response.getVersion(), equalTo(1L));
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(1).setRealtime(false).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(1).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -903,7 +895,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
// From translog:
// version 0 means ignore version, which is the default
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -916,7 +908,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
// all good
}
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(2).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -926,7 +918,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
refresh();
// version 0 means ignore version, which is the default
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(Versions.MATCH_ANY).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -939,7 +931,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
// all good
}
response = client().prepareTermVectors(indexOrAlias(), "type1", "1").setVersion(2).setRealtime(false).get();
response = client().prepareTermVectors(indexOrAlias(), "1").setVersion(2).setRealtime(false).get();
assertThat(response.isExists(), equalTo(true));
assertThat(response.getId(), equalTo("1"));
assertThat(response.getIndex(), equalTo("test"));
@@ -969,7 +961,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
TermVectorsResponse response;
for (int i = 0; i < numTerms; i++) {
filterSettings.minWordLength = numTerms - i;
response = client().prepareTermVectors("test", "type1", "1")
response = client().prepareTermVectors("test", "1")
.setSelectedFields("tags")
.setFieldStatistics(true)
.setTermStatistics(true)
@@ -1004,7 +996,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
TermVectorsResponse response;
for (int i = 0; i < numTerms; i++) {
filterSettings.maxNumTerms = i + 1;
response = client().prepareTermVectors("test", "type1", "1")
response = client().prepareTermVectors("test", "1")
.setSelectedFields("tags")
.setFieldStatistics(true)
.setTermStatistics(true)
@@ -1037,7 +1029,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
TermVectorsResponse response;
for (int i = 0; i < numDocs; i++) {
filterSettings.maxNumTerms = i + 1;
response = client().prepareTermVectors("test", "type1", (numDocs - 1) + "")
response = client().prepareTermVectors("test", (numDocs - 1) + "")
.setSelectedFields("tags")
.setFieldStatistics(true)
.setTermStatistics(true)
@@ -1068,7 +1060,6 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
for (Integer shardId : shardIds) {
TermVectorsResponse tvResponse = client().prepareTermVectors()
.setIndex("test")
.setType("type1")
.setPreference("_shards:" + shardId)
.setDoc(jsonBuilder().startObject().field("field1", "random permutation").endObject())
.setFieldStatistics(true)
@@ -1132,7 +1123,7 @@ public class GetTermVectorsIT extends AbstractTermVectorsTestCase {
for (int id = 0; id < content.length; id++) {
Fields[] fields = new Fields[2];
for (int j = 0; j < indexNames.length; j++) {
TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], "type1", String.valueOf(id))
TermVectorsResponse resp = client().prepareTermVectors(indexNames[j], String.valueOf(id))
.setOffsets(true)
.setPositions(true)
.setSelectedFields("field1", "field2")

View File

@@ -83,7 +83,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
}
public void testMissingIndexThrowsMissingIndex() throws Exception {
TermVectorsRequestBuilder requestBuilder = client().prepareTermVectors("testX", "typeX", Integer.toString(1));
TermVectorsRequestBuilder requestBuilder = client().prepareTermVectors("testX", Integer.toString(1));
MultiTermVectorsRequestBuilder mtvBuilder = client().prepareMultiTermVectors();
mtvBuilder.add(requestBuilder.request());
MultiTermVectorsResponse response = mtvBuilder.execute().actionGet();
@@ -96,7 +96,7 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
assertAcked(prepareCreate("test").addAlias(new Alias("alias")).setSettings(Settings.builder().put("index.refresh_interval", -1)));
ensureGreen();
MultiTermVectorsResponse response = client().prepareMultiTermVectors().add(indexOrAlias(), "type1", "1").get();
MultiTermVectorsResponse response = client().prepareMultiTermVectors().add(indexOrAlias(), "1").get();
assertThat(response.getResponses().length, equalTo(1));
assertThat(response.getResponses()[0].getResponse().isExists(), equalTo(false));
@@ -106,9 +106,9 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
// Version from translog
response = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(Versions.MATCH_ANY))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(1))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(2))
.add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(Versions.MATCH_ANY))
.add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(1))
.add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -130,9 +130,9 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
// Version from Lucene index
refresh();
response = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(Versions.MATCH_ANY).realtime(false))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(1).realtime(false))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").selectedFields("field").version(2).realtime(false))
.add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(Versions.MATCH_ANY).realtime(false))
.add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(1).realtime(false))
.add(new TermVectorsRequest(indexOrAlias(), "1").selectedFields("field").version(2).realtime(false))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -155,9 +155,9 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
// Version from translog
response = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(Versions.MATCH_ANY))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(1))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(2))
.add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(Versions.MATCH_ANY))
.add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(1))
.add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default
@@ -180,9 +180,9 @@ public class MultiTermVectorsIT extends AbstractTermVectorsTestCase {
// Version from Lucene index
refresh();
response = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(Versions.MATCH_ANY))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(1))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").selectedFields("field").version(2))
.add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(Versions.MATCH_ANY))
.add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(1))
.add(new TermVectorsRequest(indexOrAlias(), "2").selectedFields("field").version(2))
.get();
assertThat(response.getResponses().length, equalTo(3));
// [0] version doesn't matter, which is the default

View File

@@ -618,15 +618,15 @@ public class SimpleRoutingIT extends OpenSearchIntegTestCase {
}
logger.info("--> verifying term vector with id [1], with routing [0], should succeed");
TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "type1", "1").setRouting(routingValue).get();
TermVectorsResponse termVectorsResponse = client().prepareTermVectors(indexOrAlias(), "1").setRouting(routingValue).get();
assertThat(termVectorsResponse.isExists(), equalTo(true));
assertThat(termVectorsResponse.getId(), equalTo("1"));
try {
client().prepareTermVectors(indexOrAlias(), "type1", "1").get();
client().prepareTermVectors(indexOrAlias(), "1").get();
fail();
} catch (RoutingMissingException e) {
assertThat(e.getMessage(), equalTo("routing is required for [test]/[type1]/[1]"));
assertThat(e.getMessage(), equalTo("routing is required for [test]/[_doc]/[1]"));
}
UpdateResponse updateResponse = client().prepareUpdate(indexOrAlias(), "type1", "1")
@@ -668,8 +668,8 @@ public class SimpleRoutingIT extends OpenSearchIntegTestCase {
assertThat(multiGetResponse.getResponses()[1].getFailure().getMessage(), equalTo("routing is required for [test]/[type1]/[2]"));
MultiTermVectorsResponse multiTermVectorsResponse = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1").routing(routingValue))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2").routing(routingValue))
.add(new TermVectorsRequest(indexOrAlias(), "1").routing(routingValue))
.add(new TermVectorsRequest(indexOrAlias(), "2").routing(routingValue))
.get();
assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2));
assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1"));
@@ -682,15 +682,15 @@ public class SimpleRoutingIT extends OpenSearchIntegTestCase {
assertThat(multiTermVectorsResponse.getResponses()[1].getResponse().isExists(), equalTo(true));
multiTermVectorsResponse = client().prepareMultiTermVectors()
.add(new TermVectorsRequest(indexOrAlias(), "type1", "1"))
.add(new TermVectorsRequest(indexOrAlias(), "type1", "2"))
.add(new TermVectorsRequest(indexOrAlias(), "1"))
.add(new TermVectorsRequest(indexOrAlias(), "2"))
.get();
assertThat(multiTermVectorsResponse.getResponses().length, equalTo(2));
assertThat(multiTermVectorsResponse.getResponses()[0].getId(), equalTo("1"));
assertThat(multiTermVectorsResponse.getResponses()[0].isFailed(), equalTo(true));
assertThat(
multiTermVectorsResponse.getResponses()[0].getFailure().getCause().getMessage(),
equalTo("routing is required for [test]/[type1]/[1]")
equalTo("routing is required for [test]/[_doc]/[1]")
);
assertThat(multiTermVectorsResponse.getResponses()[0].getResponse(), nullValue());
assertThat(multiTermVectorsResponse.getResponses()[1].getId(), equalTo("2"));
@@ -698,7 +698,7 @@ public class SimpleRoutingIT extends OpenSearchIntegTestCase {
assertThat(multiTermVectorsResponse.getResponses()[1].getResponse(), nullValue());
assertThat(
multiTermVectorsResponse.getResponses()[1].getFailure().getCause().getMessage(),
equalTo("routing is required for [test]/[type1]/[2]")
equalTo("routing is required for [test]/[_doc]/[2]")
);
}

View File

@@ -148,7 +148,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
logger.info("Running moreLikeThis");
SearchResponse response = client().prepareSearch()
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "type1", "1") }).minTermFreq(1).minDocFreq(1))
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "1") }).minTermFreq(1).minDocFreq(1))
.get();
assertHitCount(response, 1L);
}
@@ -319,7 +319,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
refresh(indexName);
SearchResponse response = client().prepareSearch()
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, typeName, "1") }).minTermFreq(1).minDocFreq(1))
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item(aliasName, "1") }).minTermFreq(1).minDocFreq(1))
.get();
assertHitCount(response, 2L);
assertThat(response.getHits().getAt(0).getId(), equalTo("3"));
@@ -337,11 +337,11 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
assertThat(ensureGreen(), equalTo(ClusterHealthStatus.GREEN));
SearchResponse response = client().prepareSearch()
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1") }))
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") }))
.get();
assertNoFailures(response);
assertThat(response, notNullValue());
response = client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1") })).get();
response = client().prepareSearch().setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1") })).get();
assertNoFailures(response);
assertThat(response, notNullValue());
}
@@ -361,7 +361,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
client().admin().indices().prepareRefresh("foo").get();
SearchResponse response = client().prepareSearch()
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1").routing("2") }))
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("2") }))
.get();
assertNoFailures(response);
assertThat(response, notNullValue());
@@ -387,7 +387,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.get();
client().admin().indices().prepareRefresh("foo").get();
SearchResponse response = client().prepareSearch()
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "bar", "1").routing("4000") }))
.setQuery(new MoreLikeThisQueryBuilder(null, new Item[] { new Item("foo", "1").routing("4000") }))
.get();
assertNoFailures(response);
assertThat(response, notNullValue());
@@ -530,7 +530,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
index("test", "_doc", "2", "text", "lucene release");
refresh();
Item item = new Item("test", "_doc", "1");
Item item = new Item("test", "1");
QueryBuilder query = QueryBuilders.moreLikeThisQuery(new String[] { "alias" }, null, new Item[] { item })
.minTermFreq(1)
.minDocFreq(1);
@@ -588,7 +588,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
response = client().prepareSearch()
.setQuery(
new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "type1", "2") }).minTermFreq(1)
new MoreLikeThisQueryBuilder(null, new Item[] { new Item("test", "2") }).minTermFreq(1)
.minDocFreq(1)
.include(true)
.minimumShouldMatch("0%")
@@ -635,7 +635,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
indexRandom(true, builders);
logger.info("Running MoreLikeThis");
Item[] items = new Item[] { new Item(null, null, "1") };
Item[] items = new Item[] { new Item(null, "1") };
MoreLikeThisQueryBuilder queryBuilder = QueryBuilders.moreLikeThisQuery(new String[] { "text" }, null, items)
.include(true)
.minTermFreq(1)
@@ -667,7 +667,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
for (int i = 0; i < maxIters; i++) {
int max_query_terms = randomIntBetween(1, values.length);
logger.info("Running More Like This with max_query_terms = {}", max_query_terms);
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new String[] { "text" }, null, new Item[] { new Item(null, null, "0") })
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new String[] { "text" }, null, new Item[] { new Item(null, "0") })
.minTermFreq(1)
.minDocFreq(1)
.maxQueryTerms(max_query_terms)
@@ -731,7 +731,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
logger.info("Checking the document matches ...");
// routing to ensure we hit the shard with the doc
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", "type1", doc).routing("0") }).minTermFreq(0)
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", doc).routing("0") }).minTermFreq(0)
.minDocFreq(0)
.maxQueryTerms(100)
.minimumShouldMatch("100%"); // strict all terms must match!
@ -740,6 +740,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
assertHitCount(response, 1);
}
@AwaitsFix(bugUrl = "https://github.com/opensearch-project/OpenSearch/issues/2107")
public void testMoreLikeThisMalformedArtificialDocs() throws Exception {
logger.info("Creating the index ...");
assertAcked(prepareCreate("test").addMapping("type1", "text", "type=text,analyzer=whitespace", "date", "type=date"));
@ -757,19 +758,17 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.field("text", "Hello World!")
.field("date", "this is not a date!")
.endObject();
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", "type1", malformedFieldDoc) }).minTermFreq(0)
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", malformedFieldDoc) }).minTermFreq(0)
.minDocFreq(0)
.minimumShouldMatch("0%");
SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, 0);
logger.info("Checking with an empty document ...");
XContentBuilder emptyDoc = jsonBuilder().startObject().endObject();
mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", "type1", emptyDoc) }).minTermFreq(0)
.minDocFreq(0)
.minimumShouldMatch("0%");
response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", emptyDoc) }).minTermFreq(0).minDocFreq(0).minimumShouldMatch("0%");
response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, 0);
@ -778,10 +777,10 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.field("text", "Hello World!")
.field("date", "1000-01-01") // should be properly parsed but ignored ...
.endObject();
mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", "type1", normalDoc) }).minTermFreq(0)
mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", normalDoc) }).minTermFreq(0)
.minDocFreq(0)
.minimumShouldMatch("100%"); // strict all terms must match but date is ignored
response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, 1);
}
@ -806,7 +805,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
indexRandom(true, builders);
logger.info("First check the document matches all indexed docs.");
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", "type1", doc) }).minTermFreq(0)
MoreLikeThisQueryBuilder mltQuery = moreLikeThisQuery(new Item[] { new Item("test", doc) }).minTermFreq(0)
.minDocFreq(0)
.maxQueryTerms(100)
.minimumShouldMatch("0%");
@ -817,8 +816,8 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
logger.info("Now check like this doc, but ignore one doc in the index, then two and so on...");
List<Item> docs = new ArrayList<>(numFields);
for (int i = 0; i < numFields; i++) {
docs.add(new Item("test", "type1", i + ""));
mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", "type1", doc) }).unlike(docs.toArray(new Item[docs.size()]))
docs.add(new Item("test", i + ""));
mltQuery = moreLikeThisQuery(null, new Item[] { new Item("test", doc) }).unlike(docs.toArray(new Item[docs.size()]))
.minTermFreq(0)
.minDocFreq(0)
.maxQueryTerms(100)
@ -868,7 +867,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
client().prepareIndex("index", "type", "3").setRouting("4").setSource("text", "this is yet another document").get();
refresh("index");
Item item = new Item("index", "type", "2").routing("1");
Item item = new Item("index", "2").routing("1");
MoreLikeThisQueryBuilder moreLikeThisQueryBuilder = new MoreLikeThisQueryBuilder(
new String[] { "text" },
null,
@ -926,10 +925,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.setQuery(
new MoreLikeThisQueryBuilder(
null,
new Item[] {
new Item("test", "1").routing("1"),
new Item("test", "type1", "2"),
new Item("test", "type1", "3") }
new Item[] { new Item("test", "1").routing("1"), new Item("test", "2"), new Item("test", "3") }
).minTermFreq(1).minDocFreq(1)
)
.get()
@ -937,7 +933,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
Throwable cause = exception.getCause();
assertThat(cause, instanceOf(RoutingMissingException.class));
assertThat(cause.getMessage(), equalTo("routing is required for [test]/[type1]/[2]"));
assertThat(cause.getMessage(), equalTo("routing is required for [test]/[_doc]/[2]"));
}
}
}

View File

@ -357,7 +357,7 @@ public class SimpleValidateQueryIT extends OpenSearchIntegTestCase {
assertExplanation(QueryBuilders.fuzzyQuery("field", "jump"), containsString("(field:jumps)^0.75"), true);
// more like this queries
Item[] items = new Item[] { new Item(null, null, "1") };
Item[] items = new Item[] { new Item(null, "1") };
assertExplanation(
QueryBuilders.moreLikeThisQuery(new String[] { "field" }, null, items)
.include(true)

View File

@ -35,6 +35,7 @@ package org.opensearch.action;
import org.opensearch.OpenSearchException;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.rest.RestStatus;
import java.io.IOException;
@ -46,6 +47,10 @@ public class RoutingMissingException extends OpenSearchException {
private final String id;
public RoutingMissingException(String index, String id) {
this(index, MapperService.SINGLE_MAPPING_NAME, id);
}
public RoutingMissingException(String index, String type, String id) {
super("routing is required for [" + index + "]/[" + type + "]/[" + id + "]");
Objects.requireNonNull(index, "index must not be null");

View File

@ -72,16 +72,6 @@ public class MultiTermVectorsItemResponse implements Writeable {
return response.getIndex();
}
/**
* The type of the document.
*/
public String getType() {
if (failure != null) {
return failure.getType();
}
return response.getType();
}
/**
* The id of the document.
*/

View File

@ -42,7 +42,6 @@ import org.opensearch.common.Nullable;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.index.mapper.MapperService;
import java.io.IOException;
import java.util.ArrayList;
@ -80,8 +79,8 @@ public class MultiTermVectorsRequest extends ActionRequest
return this;
}
public MultiTermVectorsRequest add(String index, @Nullable String type, String id) {
requests.add(new TermVectorsRequest(index, type, id));
public MultiTermVectorsRequest add(String index, String id) {
requests.add(new TermVectorsRequest(index, id));
return this;
}
@ -132,9 +131,6 @@ public class MultiTermVectorsRequest extends ActionRequest
throw new IllegalArgumentException("docs array element should include an object");
}
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(template);
if (termVectorsRequest.type() == null) {
termVectorsRequest.type(MapperService.SINGLE_MAPPING_NAME);
}
TermVectorsRequest.parseRequest(termVectorsRequest, parser);
add(termVectorsRequest);
}

View File

@ -34,7 +34,6 @@ package org.opensearch.action.termvectors;
import org.opensearch.action.ActionRequestBuilder;
import org.opensearch.client.OpenSearchClient;
import org.opensearch.common.Nullable;
public class MultiTermVectorsRequestBuilder extends ActionRequestBuilder<MultiTermVectorsRequest, MultiTermVectorsResponse> {
@ -42,16 +41,16 @@ public class MultiTermVectorsRequestBuilder extends ActionRequestBuilder<MultiTe
super(client, action, new MultiTermVectorsRequest());
}
public MultiTermVectorsRequestBuilder add(String index, @Nullable String type, Iterable<String> ids) {
public MultiTermVectorsRequestBuilder add(String index, Iterable<String> ids) {
for (String id : ids) {
request.add(index, type, id);
request.add(index, id);
}
return this;
}
public MultiTermVectorsRequestBuilder add(String index, @Nullable String type, String... ids) {
public MultiTermVectorsRequestBuilder add(String index, String... ids) {
for (String id : ids) {
request.add(index, type, id);
request.add(index, id);
}
return this;
}

View File

@ -33,6 +33,7 @@
package org.opensearch.action.termvectors;
import org.opensearch.OpenSearchException;
import org.opensearch.Version;
import org.opensearch.action.ActionResponse;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
@ -51,20 +52,21 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
*/
public static class Failure implements Writeable {
private final String index;
private final String type;
private final String id;
private final Exception cause;
public Failure(String index, String type, String id, Exception cause) {
public Failure(String index, String id, Exception cause) {
this.index = index;
this.type = type;
this.id = id;
this.cause = cause;
}
public Failure(StreamInput in) throws IOException {
index = in.readString();
type = in.readOptionalString();
if (in.getVersion().before(Version.V_2_0_0)) {
// ignore removed type from pre-2.0.0 versions
in.readOptionalString();
}
id = in.readString();
cause = in.readException();
}
@ -76,16 +78,6 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
return this.index;
}
/**
* The type of the action.
*
* @deprecated Types are in the process of being removed.
*/
@Deprecated
public String getType() {
return type;
}
/**
* The id of the action.
*/
@ -103,7 +95,10 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
out.writeOptionalString(type);
if (out.getVersion().before(Version.V_2_0_0)) {
// types no longer supported
out.writeOptionalString(null);
}
out.writeString(id);
out.writeException(cause);
}
@ -138,7 +133,6 @@ public class MultiTermVectorsResponse extends ActionResponse implements Iterable
builder.startObject();
Failure failure = response.getFailure();
builder.field(Fields._INDEX, failure.getIndex());
builder.field(Fields._TYPE, failure.getType());
builder.field(Fields._ID, failure.getId());
OpenSearchException.generateFailureXContent(builder, params, failure.getCause(), true);
builder.endObject();

View File

@ -34,6 +34,7 @@ package org.opensearch.action.termvectors;
import org.opensearch.LegacyESVersion;
import org.opensearch.OpenSearchParseException;
import org.opensearch.Version;
import org.opensearch.action.ActionRequestValidationException;
import org.opensearch.action.RealtimeRequest;
import org.opensearch.action.ValidateActions;
@ -45,7 +46,6 @@ import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.lucene.uid.Versions;
import org.opensearch.common.util.set.Sets;
import org.opensearch.common.xcontent.XContentBuilder;
@ -53,7 +53,7 @@ import org.opensearch.common.xcontent.XContentHelper;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.index.VersionType;
import org.opensearch.rest.action.document.RestTermVectorsAction;
import org.opensearch.index.mapper.MapperService;
import java.io.IOException;
import java.util.ArrayList;
@ -71,14 +71,11 @@ import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
* Request returning the term vector (doc frequency, positions, offsets) for a
* document.
* <p>
* Note, the {@link #index()}, {@link #type(String)} and {@link #id(String)} are
* Note, the {@link #index()} and {@link #id(String)} are
* required.
*/
public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> implements RealtimeRequest {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(TermVectorsRequest.class);
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField VERSION = new ParseField("version");
@ -91,8 +88,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
private static final ParseField FILTER = new ParseField("filter");
private static final ParseField DOC = new ParseField("doc");
private String type;
private String id;
private BytesReference doc;
@ -176,7 +171,10 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
TermVectorsRequest(StreamInput in) throws IOException {
super(in);
type = in.readString();
if (in.getVersion().before(Version.V_2_0_0)) {
// types no longer supported; ignore for BWC
in.readString();
}
id = in.readString();
if (in.readBoolean()) {
@ -218,24 +216,20 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
/**
* Constructs a new term vector request for a document that will be fetched
* from the provided index. Use {@link #type(String)} and
* {@link #id(String)} to specify the document to load.
* from the provided index. Use {@link #id(String)} to specify the document to load.
*/
public TermVectorsRequest(String index, String type, String id) {
public TermVectorsRequest(String index, String id) {
super(index);
this.id = id;
this.type = type;
}
/**
* Constructs a new term vector request for a document that will be fetched
* from the provided index. Use {@link #type(String)} and
* {@link #id(String)} to specify the document to load.
* from the provided index. Use {@link #id(String)} to specify the document to load.
*/
public TermVectorsRequest(TermVectorsRequest other) {
super(other.index());
this.id = other.id();
this.type = other.type();
if (other.doc != null) {
this.doc = new BytesArray(other.doc().toBytesRef(), true);
this.xContentType = other.xContentType;
@ -258,7 +252,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
public TermVectorsRequest(MultiGetRequest.Item item) {
super(item.index());
this.id = item.id();
this.type = item.type();
this.selectedFields(item.storedFields());
this.routing(item.routing());
}
@ -267,21 +260,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
return flagsEnum;
}
/**
* Sets the type of document to get the term vector for.
*/
public TermVectorsRequest type(String type) {
this.type = type;
return this;
}
/**
* Returns the type of document to get the term vector for.
*/
public String type() {
return type;
}
/**
* Returns the id of document the term vector is requested for.
*/
@ -535,9 +513,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = super.validateNonNullIndex();
if (type == null) {
validationException = ValidateActions.addValidationError("type is missing", validationException);
}
if (id == null && doc == null) {
validationException = ValidateActions.addValidationError("id or doc is missing", validationException);
}
@ -547,7 +522,10 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
@Override
public void writeTo(StreamOutput out) throws IOException {
super.writeTo(out);
out.writeString(type);
if (out.getVersion().before(Version.V_2_0_0)) {
// types no longer supported; send "_doc" for bwc
out.writeString(MapperService.SINGLE_MAPPING_NAME);
}
out.writeString(id);
out.writeBoolean(doc != null);
@ -631,9 +609,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
} else if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
// the following is important for multi request parsing.
termVectorsRequest.index = parser.text();
} else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.type = parser.text();
deprecationLogger.deprecate("termvectors_with_types", RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
} else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
if (termVectorsRequest.doc != null) {
throw new OpenSearchParseException(

View File

@ -54,11 +54,10 @@ public class TermVectorsRequestBuilder extends ActionRequestBuilder<TermVectorsR
/**
* Constructs a new term vector request builder for a document that will be fetched
* from the provided index. Use {@code index}, type and
* {@code id} to specify the document to load.
* from the provided index. Use {@code index} and {@code id} to specify the document to load.
*/
public TermVectorsRequestBuilder(OpenSearchClient client, TermVectorsAction action, String index, String type, String id) {
super(client, action, new TermVectorsRequest(index, type, id));
public TermVectorsRequestBuilder(OpenSearchClient client, TermVectorsAction action, String index, String id) {
super(client, action, new TermVectorsRequest(index, id));
}
/**
@ -69,14 +68,6 @@ public class TermVectorsRequestBuilder extends ActionRequestBuilder<TermVectorsR
return this;
}
/**
* Sets the type of the document.
*/
public TermVectorsRequestBuilder setType(String type) {
request.type(type);
return this;
}
/**
* Sets the id of the document.
*/

View File

@ -40,6 +40,7 @@ import org.apache.lucene.search.BoostAttribute;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.CharsRefBuilder;
import org.opensearch.Version;
import org.opensearch.action.ActionResponse;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
@ -49,6 +50,7 @@ import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.unit.TimeValue;
import org.opensearch.common.xcontent.ToXContentObject;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.index.mapper.MapperService;
import java.io.IOException;
import java.util.Collections;
@ -77,7 +79,6 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
public static final String END_OFFSET = "end_offset";
public static final String PAYLOAD = "payload";
public static final String _INDEX = "_index";
public static final String _TYPE = "_type";
public static final String _ID = "_id";
public static final String _VERSION = "_version";
public static final String FOUND = "found";
@ -89,7 +90,6 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
private BytesReference termVectors;
private BytesReference headerRef;
private String index;
private String type;
private String id;
private long docVersion;
private boolean exists = false;
@ -104,9 +104,8 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
int[] currentEndOffset = new int[0];
BytesReference[] currentPayloads = new BytesReference[0];
public TermVectorsResponse(String index, String type, String id) {
public TermVectorsResponse(String index, String id) {
this.index = index;
this.type = type;
this.id = id;
}
@ -114,7 +113,10 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
TermVectorsResponse(StreamInput in) throws IOException {
index = in.readString();
type = in.readString();
if (in.getVersion().before(Version.V_2_0_0)) {
// ignore deprecated/removed type
in.readString();
}
id = in.readString();
docVersion = in.readVLong();
exists = in.readBoolean();
@ -129,7 +131,10 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeString(index);
out.writeString(type);
if (out.getVersion().before(Version.V_2_0_0)) {
// types no longer supported; send the default type (_doc) to previous versions for bwc
out.writeString(MapperService.SINGLE_MAPPING_NAME);
}
out.writeString(id);
out.writeVLong(docVersion);
final boolean docExists = isExists();
@ -180,11 +185,9 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
assert index != null;
assert type != null;
assert id != null;
builder.startObject();
builder.field(FieldStrings._INDEX, index);
builder.field(FieldStrings._TYPE, type);
if (!isArtificial()) {
builder.field(FieldStrings._ID, id);
}
@ -420,10 +423,6 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
return index;
}
public String getType() {
return type;
}
public String getId() {
return id;
}

View File

@ -92,7 +92,6 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
null,
new MultiTermVectorsResponse.Failure(
termVectorsRequest.index(),
termVectorsRequest.type(),
termVectorsRequest.id(),
new IndexNotFoundException(termVectorsRequest.index())
)
@ -108,9 +107,8 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
null,
new MultiTermVectorsResponse.Failure(
concreteSingleIndex,
termVectorsRequest.type(),
termVectorsRequest.id(),
new RoutingMissingException(concreteSingleIndex, termVectorsRequest.type(), termVectorsRequest.id())
new RoutingMissingException(concreteSingleIndex, termVectorsRequest.id())
)
)
);
@ -166,12 +164,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
shardRequest.locations.get(i),
new MultiTermVectorsItemResponse(
null,
new MultiTermVectorsResponse.Failure(
shardRequest.index(),
termVectorsRequest.type(),
termVectorsRequest.id(),
e
)
new MultiTermVectorsResponse.Failure(shardRequest.index(), termVectorsRequest.id(), e)
)
);
}

View File

@ -117,16 +117,15 @@ public class TransportShardMultiTermsVectorAction extends TransportSingleShardAc
} else {
logger.debug(
() -> new ParameterizedMessage(
"{} failed to execute multi term vectors for [{}]/[{}]",
"{} failed to execute multi term vectors for [{}]",
shardId,
termVectorsRequest.type(),
termVectorsRequest.id()
),
e
);
response.add(
request.locations.get(i),
new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.type(), termVectorsRequest.id(), e)
new MultiTermVectorsResponse.Failure(request.index(), termVectorsRequest.id(), e)
);
}
}

View File

@ -107,7 +107,7 @@ public class TransportTermVectorsAction extends TransportSingleShardAction<TermV
request.request().routing(state.metadata().resolveIndexRouting(request.request().routing(), request.request().index()));
// Fail fast on the node that received the request.
if (request.request().routing() == null && state.getMetadata().routingRequired(request.concreteIndex())) {
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());
throw new RoutingMissingException(request.concreteIndex(), request.request().id());
}
}

View File

@ -375,10 +375,9 @@ public interface Client extends OpenSearchClient, Releasable {
* Builder for the term vector request.
*
* @param index The index to load the document from
* @param type The type of the document
* @param id The id of the document
*/
TermVectorsRequestBuilder prepareTermVectors(String index, String type, String id);
TermVectorsRequestBuilder prepareTermVectors(String index, String id);
/**
* Multi get term vectors.

View File

@ -612,8 +612,8 @@ public abstract class AbstractClient implements Client {
}
@Override
public TermVectorsRequestBuilder prepareTermVectors(String index, String type, String id) {
return new TermVectorsRequestBuilder(this, TermVectorsAction.INSTANCE, index, type, id);
public TermVectorsRequestBuilder prepareTermVectors(String index, String id) {
return new TermVectorsRequestBuilder(this, TermVectorsAction.INSTANCE, index, id);
}
@Override

View File

@ -39,6 +39,7 @@ import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.Query;
import org.opensearch.OpenSearchParseException;
import org.opensearch.ExceptionsHelper;
import org.opensearch.Version;
import org.opensearch.action.RoutingMissingException;
import org.opensearch.action.termvectors.MultiTermVectorsItemResponse;
import org.opensearch.action.termvectors.MultiTermVectorsRequest;
@ -54,7 +55,6 @@ import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.lucene.search.MoreLikeThisQuery;
import org.opensearch.common.lucene.search.XMoreLikeThis;
import org.opensearch.common.lucene.uid.Versions;
@ -67,7 +67,6 @@ import org.opensearch.index.VersionType;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.TextFieldMapper.TextFieldType;
import java.io.IOException;
@ -81,7 +80,6 @@ import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
@ -92,7 +90,6 @@ import static org.opensearch.common.xcontent.XContentFactory.jsonBuilder;
*/
public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQueryBuilder> {
public static final String NAME = "more_like_this";
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(MoreLikeThisQueryBuilder.class);
static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [more_like_this] "
+ "queries. The type should no longer be specified in the [like] and [unlike] sections.";
@ -128,7 +125,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
private static final ParseField FAIL_ON_UNSUPPORTED_FIELD = new ParseField("fail_on_unsupported_field");
private static final ParseField INDEX = new ParseField("_index");
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
public static final ParseField DOC = new ParseField("doc");
private static final ParseField PER_FIELD_ANALYZER = new ParseField("per_field_analyzer");
@ -168,7 +164,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
public static final Item[] EMPTY_ARRAY = new Item[0];
private String index;
private String type;
private String id;
private BytesReference doc;
private XContentType xContentType;
@ -185,7 +180,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
throw new IllegalArgumentException("Item requires either id or doc to be non-null");
}
this.index = copy.index;
this.type = copy.type;
this.id = copy.id;
this.routing = copy.routing;
this.doc = copy.doc;
@ -225,51 +219,15 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
this.xContentType = doc.contentType();
}
/**
* Constructor for a given item / document request
*
* @param index the index where the document is located
* @param type the type of the document
* @param id and its id
*
* @deprecated Types are in the process of being removed, use {@link Item(String, String)} instead.
*/
@Deprecated
public Item(@Nullable String index, @Nullable String type, String id) {
if (id == null) {
throw new IllegalArgumentException("Item requires id to be non-null");
}
this.index = index;
this.type = type;
this.id = id;
}
/**
* Constructor for an artificial document request, that is not present in the index.
*
* @param index the index to be used for parsing the doc
* @param type the type to be used for parsing the doc
* @param doc the document specification
*
* @deprecated Types are in the process of being removed, use {@link Item(String, XContentBuilder)} instead.
*/
@Deprecated
public Item(@Nullable String index, @Nullable String type, XContentBuilder doc) {
if (doc == null) {
throw new IllegalArgumentException("Item requires doc to be non-null");
}
this.index = index;
this.type = type;
this.doc = BytesReference.bytes(doc);
this.xContentType = doc.contentType();
}
/**
* Read from a stream.
*/
Item(StreamInput in) throws IOException {
index = in.readOptionalString();
type = in.readOptionalString();
if (in.getVersion().before(Version.V_2_0_0)) {
// types no longer supported so ignore
in.readOptionalString();
}
if (in.readBoolean()) {
doc = (BytesReference) in.readGenericValue();
xContentType = in.readEnum(XContentType.class);
@ -286,7 +244,10 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
@Override
public void writeTo(StreamOutput out) throws IOException {
out.writeOptionalString(index);
out.writeOptionalString(type);
if (out.getVersion().before(Version.V_2_0_0)) {
// types no longer supported; send a null optional string to previous versions for bwc
out.writeOptionalString(null);
}
out.writeBoolean(doc != null);
if (doc != null) {
out.writeGenericValue(doc);
@ -310,23 +271,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
return this;
}
/**
* @deprecated Types are in the process of being removed.
*/
@Deprecated
public String type() {
return type;
}
/**
* @deprecated Types are in the process of being removed.
*/
@Deprecated
public Item type(String type) {
this.type = type;
return this;
}
public String id() {
return id;
}
@ -391,7 +335,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
* Convert this to a {@link TermVectorsRequest} for fetching the terms of the document.
*/
TermVectorsRequest toTermVectorsRequest() {
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(index, type, id).selectedFields(fields)
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(index, id).selectedFields(fields)
.routing(routing)
.version(version)
.versionType(versionType)
@ -421,8 +365,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
} else if (currentFieldName != null) {
if (INDEX.match(currentFieldName, parser.getDeprecationHandler())) {
item.index = parser.text();
} else if (TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
item.type = parser.text();
} else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
item.id = parser.text();
} else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) {
@ -468,9 +410,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (this.index != null) {
builder.field(INDEX.getPreferredName(), this.index);
}
if (this.type != null) {
builder.field(TYPE.getPreferredName(), this.type);
}
if (this.id != null) {
builder.field(ID.getPreferredName(), this.id);
}
@ -511,7 +450,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
@Override
public int hashCode() {
return Objects.hash(index, type, id, doc, Arrays.hashCode(fields), perFieldAnalyzer, routing, version, versionType);
return Objects.hash(index, id, doc, Arrays.hashCode(fields), perFieldAnalyzer, routing, version, versionType);
}
@Override
@ -520,7 +459,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (!(o instanceof Item)) return false;
Item other = (Item) o;
return Objects.equals(index, other.index)
&& Objects.equals(type, other.type)
&& Objects.equals(id, other.id)
&& Objects.equals(doc, other.doc)
&& Arrays.equals(fields, other.fields) // otherwise we are comparing pointers
@ -973,16 +911,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
moreLikeThisQueryBuilder.stopWords(stopWords);
}
if (moreLikeThisQueryBuilder.isTypeless() == false) {
deprecationLogger.deprecate("more_like_this_query_with_types", TYPES_DEPRECATION_MESSAGE);
}
return moreLikeThisQueryBuilder;
}
public boolean isTypeless() {
return Stream.concat(Arrays.stream(likeItems), Arrays.stream(unlikeItems)).allMatch(item -> item.type == null);
}
private static void parseLikeField(XContentParser parser, List<String> texts, List<Item> items) throws IOException {
if (parser.currentToken().isValue()) {
texts.add(parser.text());
@ -1150,9 +1081,6 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
if (item.index() == null) {
item.index(context.index().getName());
}
if (item.type() == null) {
item.type(MapperService.SINGLE_MAPPING_NAME);
}
// default fields if not present but don't override for artificial docs
if ((item.fields() == null || item.fields().length == 0) && item.doc() == null) {
if (useDefaultField) {

View File

@ -93,11 +93,7 @@ public class TermVectorsService {
static TermVectorsResponse getTermVectors(IndexShard indexShard, TermVectorsRequest request, LongSupplier nanoTimeSupplier) {
final long startTime = nanoTimeSupplier.getAsLong();
final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(
indexShard.shardId().getIndex().getName(),
request.type(),
request.id()
);
final TermVectorsResponse termVectorsResponse = new TermVectorsResponse(indexShard.shardId().getIndex().getName(), request.id());
final Term uidTerm = new Term(IdFieldMapper.NAME, Uid.encodeId(request.id()));
Fields termVectorsByField = null;
@ -110,8 +106,9 @@ public class TermVectorsService {
try (
Engine.GetResult get = indexShard.get(
new Engine.Get(request.realtime(), false, request.type(), request.id(), uidTerm).version(request.version())
.versionType(request.versionType())
new Engine.Get(request.realtime(), false, MapperService.SINGLE_MAPPING_NAME, request.id(), uidTerm).version(
request.version()
).versionType(request.versionType())
);
Engine.Searcher searcher = indexShard.acquireSearcher("term_vector")
) {
@ -238,7 +235,7 @@ public class TermVectorsService {
/* generate term vectors from fetched document fields */
String[] getFields = validFields.toArray(new String[validFields.size() + 1]);
getFields[getFields.length - 1] = SourceFieldMapper.NAME;
GetResult getResult = indexShard.getService().get(get, request.id(), request.type(), getFields, null);
GetResult getResult = indexShard.getService().get(get, request.id(), MapperService.SINGLE_MAPPING_NAME, getFields, null);
Fields generatedTermVectors = generateTermVectors(
indexShard,
getResult.sourceAsMap(),
@ -329,7 +326,6 @@ public class TermVectorsService {
ParsedDocument parsedDocument = parseDocument(
indexShard,
indexShard.shardId().getIndexName(),
request.type(),
request.doc(),
request.xContentType(),
request.routing()
@ -389,15 +385,14 @@ public class TermVectorsService {
private static ParsedDocument parseDocument(
IndexShard indexShard,
String index,
String type,
BytesReference doc,
XContentType xContentType,
String routing
) {
MapperService mapperService = indexShard.mapperService();
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(type);
DocumentMapperForType docMapper = mapperService.documentMapperWithAutoCreate(MapperService.SINGLE_MAPPING_NAME);
ParsedDocument parsedDocument = docMapper.getDocumentMapper()
.parse(new SourceToParse(index, type, "_id_for_tv_api", doc, xContentType, routing));
.parse(new SourceToParse(index, MapperService.SINGLE_MAPPING_NAME, "_id_for_tv_api", doc, xContentType, routing));
if (docMapper.getMapping() != null) {
parsedDocument.addDynamicMappingsUpdate(docMapper.getMapping());
}

View File

@ -37,7 +37,6 @@ import org.opensearch.action.termvectors.TermVectorsRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestToXContentListener;
@ -79,13 +78,6 @@ public class RestMultiTermVectorsAction extends BaseRestHandler {
MultiTermVectorsRequest multiTermVectorsRequest = new MultiTermVectorsRequest();
TermVectorsRequest template = new TermVectorsRequest().index(request.param("index"));
if (request.hasParam("type")) {
deprecationLogger.deprecate("mtermvectors_with_types", TYPES_DEPRECATION_MESSAGE);
template.type(request.param("type"));
} else {
template.type(MapperService.SINGLE_MAPPING_NAME);
}
RestTermVectorsAction.readURIParameters(template, request);
multiTermVectorsRequest.ids(Strings.commaDelimitedListToStringArray(request.param("ids")));
request.withContentOrSourceParamParserOrNull(p -> multiTermVectorsRequest.add(template, p));

View File

@ -35,10 +35,8 @@ package org.opensearch.rest.action.document;
import org.opensearch.action.termvectors.TermVectorsRequest;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.index.VersionType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.RestActions;
@ -59,7 +57,6 @@ import static org.opensearch.rest.RestRequest.Method.POST;
* TermVectorsRequest.
*/
public class RestTermVectorsAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestTermVectorsAction.class);
public static final String TYPES_DEPRECATION_MESSAGE = "[types removal] " + "Specifying types in term vector requests is deprecated.";
@Override
@ -86,14 +83,7 @@ public class RestTermVectorsAction extends BaseRestHandler {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
TermVectorsRequest termVectorsRequest;
if (request.hasParam("type")) {
deprecationLogger.deprecate("termvectors_with_types", TYPES_DEPRECATION_MESSAGE);
termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("type"), request.param("id"));
} else {
termVectorsRequest = new TermVectorsRequest(request.param("index"), MapperService.SINGLE_MAPPING_NAME, request.param("id"));
}
TermVectorsRequest termVectorsRequest = new TermVectorsRequest(request.param("index"), request.param("id"));
if (request.hasContentOrSourceParam()) {
try (XContentParser parser = request.contentOrSourceParamParser()) {
TermVectorsRequest.parseRequest(termVectorsRequest, parser);

View File

@ -431,7 +431,7 @@ public abstract class AbstractTermVectorsTestCase extends OpenSearchIntegTestCas
}
protected TermVectorsRequestBuilder getRequestForConfig(TestConfig config) {
return client().prepareTermVectors(randomBoolean() ? config.doc.index : config.doc.alias, config.doc.type, config.doc.id)
return client().prepareTermVectors(randomBoolean() ? config.doc.index : config.doc.alias, config.doc.id)
.setPayloads(config.requestPayloads)
.setOffsets(config.requestOffsets)
.setPositions(config.requestPositions)

View File

@ -193,7 +193,7 @@ public class GetTermVectorsTests extends OpenSearchSingleNodeTestCase {
.execute()
.actionGet();
client().admin().indices().prepareRefresh().get();
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", "type1", Integer.toString(1))
TermVectorsRequestBuilder resp = client().prepareTermVectors("test", Integer.toString(1))
.setPayloads(true)
.setOffsets(true)
.setPositions(true)

View File

@ -49,6 +49,7 @@ import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.store.Directory;
import org.opensearch.LegacyESVersion;
import org.opensearch.action.termvectors.TermVectorsRequest.Flag;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.bytes.BytesReference;
@ -57,7 +58,9 @@ import org.opensearch.common.io.stream.OutputStreamStreamOutput;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.common.xcontent.json.JsonXContent;
import org.opensearch.index.shard.ShardId;
import org.opensearch.rest.action.document.RestTermVectorsAction;
import org.opensearch.tasks.TaskId;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.StreamsUtils;
import org.hamcrest.Matchers;
@ -74,7 +77,7 @@ import static org.hamcrest.Matchers.equalTo;
public class TermVectorsUnitTests extends OpenSearchTestCase {
public void testStreamResponse() throws Exception {
TermVectorsResponse outResponse = new TermVectorsResponse("a", "b", "c");
TermVectorsResponse outResponse = new TermVectorsResponse("a", "c");
outResponse.setExists(true);
writeStandardTermVector(outResponse);
@ -91,7 +94,7 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
// see if correct
checkIfStandardTermVector(inResponse);
outResponse = new TermVectorsResponse("a", "b", "c");
outResponse = new TermVectorsResponse("a", "c");
writeEmptyTermVector(outResponse);
// write
outBuffer = new ByteArrayOutputStream();
@ -185,7 +188,7 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
" {\"fields\" : [\"a\", \"b\",\"c\"], \"offsets\":false, \"positions\":false, \"payloads\":true}"
);
TermVectorsRequest tvr = new TermVectorsRequest(null, null, null);
TermVectorsRequest tvr = new TermVectorsRequest(null, null);
XContentParser parser = createParser(JsonXContent.jsonXContent, inputBytes);
TermVectorsRequest.parseRequest(tvr, parser);
@ -206,7 +209,7 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
RestTermVectorsAction.addFieldStringsFromParameter(tvr, additionalFields);
inputBytes = new BytesArray(" {\"offsets\":false, \"positions\":false, \"payloads\":true}");
tvr = new TermVectorsRequest(null, null, null);
tvr = new TermVectorsRequest(null, null);
parser = createParser(JsonXContent.jsonXContent, inputBytes);
TermVectorsRequest.parseRequest(tvr, parser);
additionalFields = "";
@ -222,7 +225,7 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
BytesReference inputBytes = new BytesArray(
" {\"fields\" : \"a, b,c \", \"offsets\":false, \"positions\":false, \"payloads\":true, \"meaningless_term\":2}"
);
TermVectorsRequest tvr = new TermVectorsRequest(null, null, null);
TermVectorsRequest tvr = new TermVectorsRequest(null, null);
boolean threwException = false;
try {
XContentParser parser = createParser(JsonXContent.jsonXContent, inputBytes);
@ -236,7 +239,7 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
public void testStreamRequest() throws IOException {
for (int i = 0; i < 10; i++) {
TermVectorsRequest request = new TermVectorsRequest("index", "type", "id");
TermVectorsRequest request = new TermVectorsRequest("index", "id");
request.offsets(random().nextBoolean());
request.fieldStatistics(random().nextBoolean());
request.payloads(random().nextBoolean());
@ -252,9 +255,55 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
request.writeTo(out);
// read
ByteArrayInputStream esInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
InputStreamStreamInput esBuffer = new InputStreamStreamInput(esInBuffer);
TermVectorsRequest req2 = new TermVectorsRequest(esBuffer);
ByteArrayInputStream opensearchInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
InputStreamStreamInput opensearchBuffer = new InputStreamStreamInput(opensearchInBuffer);
TermVectorsRequest req2 = new TermVectorsRequest(opensearchBuffer);
assertThat(request.offsets(), equalTo(req2.offsets()));
assertThat(request.fieldStatistics(), equalTo(req2.fieldStatistics()));
assertThat(request.payloads(), equalTo(req2.payloads()));
assertThat(request.positions(), equalTo(req2.positions()));
assertThat(request.termStatistics(), equalTo(req2.termStatistics()));
assertThat(request.preference(), equalTo(pref));
assertThat(request.routing(), equalTo(null));
assertEquals(new BytesArray("{}"), request.doc());
assertEquals(XContentType.JSON, request.xContentType());
}
}
public void testStreamRequestLegacyVersion() throws IOException {
for (int i = 0; i < 10; i++) {
TermVectorsRequest request = new TermVectorsRequest("index", "id");
request.offsets(random().nextBoolean());
request.fieldStatistics(random().nextBoolean());
request.payloads(random().nextBoolean());
request.positions(random().nextBoolean());
request.termStatistics(random().nextBoolean());
String pref = random().nextBoolean() ? "somePreference" : null;
request.preference(pref);
request.doc(new BytesArray("{}"), randomBoolean(), XContentType.JSON);
// write using older version which contains types
ByteArrayOutputStream outBuffer = new ByteArrayOutputStream();
OutputStreamStreamOutput out = new OutputStreamStreamOutput(outBuffer);
out.setVersion(LegacyESVersion.V_7_2_0);
request.writeTo(out);
// First check the type on the stream was written as "_doc" by manually parsing the stream until the type
ByteArrayInputStream opensearchInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
InputStreamStreamInput opensearchBuffer = new InputStreamStreamInput(opensearchInBuffer);
TaskId.readFromStream(opensearchBuffer);
if (opensearchBuffer.readBoolean()) {
new ShardId(opensearchBuffer);
}
opensearchBuffer.readOptionalString();
assertThat(opensearchBuffer.readString(), equalTo("_doc"));
// now read the stream as normal to check it is parsed correct if received from an older node
opensearchInBuffer = new ByteArrayInputStream(outBuffer.toByteArray());
opensearchBuffer = new InputStreamStreamInput(opensearchInBuffer);
opensearchBuffer.setVersion(LegacyESVersion.V_7_2_0);
TermVectorsRequest req2 = new TermVectorsRequest(opensearchBuffer);
assertThat(request.offsets(), equalTo(req2.offsets()));
assertThat(request.fieldStatistics(), equalTo(req2.fieldStatistics()));
@ -281,7 +330,6 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
request.add(new TermVectorsRequest(), data);
checkParsedParameters(request);
assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
}
void checkParsedParameters(MultiTermVectorsRequest request) {
@ -294,7 +342,6 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
fields.add("c");
for (TermVectorsRequest singleRequest : request.requests) {
assertThat(singleRequest.index(), equalTo("testidx"));
assertThat(singleRequest.type(), equalTo("test"));
assertThat(singleRequest.payloads(), equalTo(false));
assertThat(singleRequest.positions(), equalTo(false));
assertThat(singleRequest.offsets(), equalTo(false));
@ -313,14 +360,12 @@ public class TermVectorsUnitTests extends OpenSearchTestCase {
request.add(new TermVectorsRequest(), data);
checkParsedFilterParameters(request);
assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
}
void checkParsedFilterParameters(MultiTermVectorsRequest multiRequest) {
Set<String> ids = new HashSet<>(Arrays.asList("1", "2"));
for (TermVectorsRequest request : multiRequest.requests) {
assertThat(request.index(), equalTo("testidx"));
assertThat(request.type(), equalTo("test"));
assertTrue(ids.remove(request.id()));
assertNotNull(request.filterSettings());
assertThat(request.filterSettings().maxNumTerms, equalTo(20));

View File

@ -230,8 +230,8 @@ public class TransportMultiTermVectorsActionTests extends OpenSearchTestCase {
final Task task = createTask();
final NodeClient client = new NodeClient(Settings.EMPTY, threadPool);
final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client, MultiTermVectorsAction.INSTANCE);
request.add(new TermVectorsRequest("index1", "_doc", "1"));
request.add(new TermVectorsRequest("index2", "_doc", "2"));
request.add(new TermVectorsRequest("index1", "1"));
request.add(new TermVectorsRequest("index2", "2"));
final AtomicBoolean shardActionInvoked = new AtomicBoolean(false);
transportAction = new TransportMultiTermVectorsAction(
@ -262,8 +262,8 @@ public class TransportMultiTermVectorsActionTests extends OpenSearchTestCase {
final Task task = createTask();
final NodeClient client = new NodeClient(Settings.EMPTY, threadPool);
final MultiTermVectorsRequestBuilder request = new MultiTermVectorsRequestBuilder(client, MultiTermVectorsAction.INSTANCE);
request.add(new TermVectorsRequest("index2", "_doc", "1").routing("1"));
request.add(new TermVectorsRequest("index2", "_doc", "2"));
request.add(new TermVectorsRequest("index2", "1").routing("1"));
request.add(new TermVectorsRequest("index2", "2"));
final AtomicBoolean shardActionInvoked = new AtomicBoolean(false);
transportAction = new TransportMultiTermVectorsAction(

View File

@ -112,14 +112,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
private Item generateRandomItem() {
String index = randomBoolean() ? getIndex().getName() : null;
// indexed item or artificial document
Item item;
if (randomBoolean()) {
item = randomBoolean() ? new Item(index, randomAlphaOfLength(10)) : new Item(index, randomArtificialDoc());
} else {
String type = "doc";
item = randomBoolean() ? new Item(index, type, randomAlphaOfLength(10)) : new Item(index, type, randomArtificialDoc());
}
Item item = randomBoolean() ? new Item(index, randomAlphaOfLength(10)) : new Item(index, randomArtificialDoc());
// if no field is specified MLT uses all mapped fields for this item
if (randomBoolean()) {
@ -247,7 +240,7 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
MultiTermVectorsItemResponse[] responses = new MultiTermVectorsItemResponse[mtvRequest.size()];
int i = 0;
for (TermVectorsRequest request : mtvRequest) {
TermVectorsResponse response = new TermVectorsResponse(request.index(), request.type(), request.id());
TermVectorsResponse response = new TermVectorsResponse(request.index(), request.id());
response.setExists(true);
Fields generatedFields;
if (request.doc() != null) {
@ -449,11 +442,9 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
+ " \"fields\" : [ \"title\", \"description\" ],\n"
+ " \"like\" : [ \"and potentially some more text here as well\", {\n"
+ " \"_index\" : \"imdb\",\n"
+ " \"_type\" : \"movies\",\n"
+ " \"_id\" : \"1\"\n"
+ " }, {\n"
+ " \"_index\" : \"imdb\",\n"
+ " \"_type\" : \"movies\",\n"
+ " \"_id\" : \"2\"\n"
+ " } ],\n"
+ " \"max_query_terms\" : 12,\n"
@ -481,12 +472,6 @@ public class MoreLikeThisQueryBuilderTests extends AbstractQueryTestCase<MoreLik
protected QueryBuilder parseQuery(XContentParser parser) throws IOException {
QueryBuilder query = super.parseQuery(parser);
assertThat(query, instanceOf(MoreLikeThisQueryBuilder.class));
MoreLikeThisQueryBuilder mltQuery = (MoreLikeThisQueryBuilder) query;
if (mltQuery.isTypeless() == false && !assertedWarnings.contains(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE)) {
assertWarnings(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE);
assertedWarnings.add(MoreLikeThisQueryBuilder.TYPES_DEPRECATION_MESSAGE);
}
return query;
}

View File

@ -81,7 +81,7 @@ public class TermVectorsServiceTests extends OpenSearchSingleNodeTestCase {
List<Long> longs = Stream.of(abs(randomLong()), abs(randomLong())).sorted().collect(toList());
TermVectorsRequest request = new TermVectorsRequest("test", "type1", "0");
TermVectorsRequest request = new TermVectorsRequest("test", "0");
TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request, longs.iterator()::next);
assertThat(response, notNullValue());
@ -112,7 +112,7 @@ public class TermVectorsServiceTests extends OpenSearchSingleNodeTestCase {
}
bulk.get();
TermVectorsRequest request = new TermVectorsRequest("test", "_doc", "0").termStatistics(true);
TermVectorsRequest request = new TermVectorsRequest("test", "0").termStatistics(true);
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService(resolveIndex("test"));
@ -153,7 +153,7 @@ public class TermVectorsServiceTests extends OpenSearchSingleNodeTestCase {
}
bulk.get();
TermVectorsRequest request = new TermVectorsRequest("test", "_doc", "0").termStatistics(true);
TermVectorsRequest request = new TermVectorsRequest("test", "0").termStatistics(true);
IndicesService indicesService = getInstanceFromNode(IndicesService.class);
IndexService test = indicesService.indexService(resolveIndex("test"));

View File

@ -1,106 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.rest.action.document;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestRequest.Method;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class RestMultiTermVectorsActionTests extends RestActionTestCase {
@Before
public void setUpAction() {
controller().registerHandler(new RestMultiTermVectorsAction());
}
public void testTypeInPath() {
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST)
.withPath("/some_index/some_type/_mtermvectors")
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
dispatchRequest(request);
assertWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
}
public void testTypeParameter() {
Map<String, String> params = new HashMap<>();
params.put("type", "some_type");
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET)
.withPath("/some_index/_mtermvectors")
.withParams(params)
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
dispatchRequest(request);
assertWarnings(RestMultiTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
}
public void testTypeInBody() throws IOException {
XContentBuilder content = XContentFactory.jsonBuilder()
.startObject()
.startArray("docs")
.startObject()
.field("_type", "some_type")
.field("_id", 1)
.endObject()
.endArray()
.endObject();
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET)
.withPath("/some_index/_mtermvectors")
.withContent(BytesReference.bytes(content), XContentType.JSON)
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
dispatchRequest(request);
assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
}
}

View File

@ -1,80 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.rest.action.document;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestRequest.Method;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.io.IOException;
public class RestTermVectorsActionTests extends RestActionTestCase {
@Before
public void setUpAction() {
controller().registerHandler(new RestTermVectorsAction());
}
public void testTypeInPath() {
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST)
.withPath("/some_index/some_type/some_id/_termvectors")
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
dispatchRequest(request);
assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
}
public void testTypeInBody() throws IOException {
XContentBuilder content = XContentFactory.jsonBuilder().startObject().field("_type", "some_type").field("_id", 1).endObject();
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.GET)
.withPath("/some_index/_termvectors/some_id")
.withContent(BytesReference.bytes(content), XContentType.JSON)
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
dispatchRequest(request);
assertWarnings(RestTermVectorsAction.TYPES_DEPRECATION_MESSAGE);
}
}

View File

@ -7,7 +7,6 @@
"offsets":false,
"positions":false,
"fields":["a","b","c"],
"_index": "testidx",
"_type":"test"
"_index": "testidx"
}
}
}

View File

@ -8,8 +8,7 @@
"offsets": false,
"positions": false,
"fields":["a","b","c"],
"_index": "testidx",
"_type": "test"
"_index": "testidx"
},
{
"_id": "2",
@ -19,8 +18,7 @@
"offsets": false,
"positions": false,
"fields":["a","b","c"],
"_index": "testidx",
"_type": "test"
"_index": "testidx"
}
]
}
}

View File

@ -2,7 +2,6 @@
"ids": ["1","2"],
"parameters": {
"_index": "testidx",
"_type": "test",
"filter": {
"max_num_terms": 20,
"min_term_freq": 1,
@ -13,4 +12,4 @@
"max_word_length": 20
}
}
}
}