[Remove] Type from Search Internals (#2109)

With mapping types deprecated, type support is removed from the internal search API
(SearchRequest and QueryShardContext).

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
Nick Knize 2022-02-17 16:17:31 -06:00 committed by GitHub
parent 32a761df2c
commit 7fe642fda5
127 changed files with 178 additions and 2314 deletions
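
For orientation, a minimal sketch (not part of the diff, assuming the OpenSearch classes shown below) of what the removal means for callers: search requests are scoped by indices only, and the former types(...) setter no longer exists on SearchRequest.

import org.opensearch.action.search.SearchRequest;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

// Before this change: new SearchRequest("my-index").types("my-type")
// After: the request carries indices only; the type path segment is gone.
SearchRequest request = new SearchRequest("my-index")
    .source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()));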

View File

@ -432,7 +432,7 @@ final class RequestConverters {
* for standard searches
*/
static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException {
Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint));
Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchEndpoint));
Params params = new Params();
addSearchRequestParams(params, searchRequest);
@ -502,7 +502,7 @@ final class RequestConverters {
request = new Request(HttpGet.METHOD_NAME, "_render/template");
} else {
SearchRequest searchRequest = searchTemplateRequest.getRequest();
String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template");
String endpoint = endpoint(searchRequest.indices(), "_search/template");
request = new Request(HttpGet.METHOD_NAME, endpoint);
Params params = new Params();
@ -633,7 +633,7 @@ final class RequestConverters {
private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest, boolean waitForCompletion)
throws IOException {
String endpoint = endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params().withRouting(deleteByQueryRequest.getRouting())
.withRefresh(deleteByQueryRequest.isRefresh())
@ -661,7 +661,7 @@ final class RequestConverters {
}
static Request prepareUpdateByQueryRequest(UpdateByQueryRequest updateByQueryRequest, boolean waitForCompletion) throws IOException {
String endpoint = endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
String endpoint = endpoint(updateByQueryRequest.indices(), "_update_by_query");
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
Params params = new Params().withRouting(updateByQueryRequest.getRouting())
.withPipeline(updateByQueryRequest.getPipeline())
@ -799,10 +799,12 @@ final class RequestConverters {
return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
}
@Deprecated
static String endpoint(String index, String type, String id) {
return new EndpointBuilder().addPathPart(index, type, id).build();
}
@Deprecated
static String endpoint(String index, String type, String id, String endpoint) {
return new EndpointBuilder().addPathPart(index, type, id).addPathPartAsIs(endpoint).build();
}
@ -815,6 +817,7 @@ final class RequestConverters {
return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).build();
}
@Deprecated
static String endpoint(String[] indices, String[] types, String endpoint) {
return new EndpointBuilder().addCommaSeparatedPathParts(indices)
.addCommaSeparatedPathParts(types)
@ -829,6 +832,7 @@ final class RequestConverters {
.build();
}
@Deprecated
static String endpoint(String[] indices, String endpoint, String type) {
return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).addPathPart(type).build();
}
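
As a quick reference, a hedged sketch of the typeless endpoint helper that the converters above now call (same package-private access as in RequestConvertersTests; the index names are hypothetical). The typed overloads remain only as @Deprecated shims.

// Builds "/index1,index2/_search"; no type segment is inserted any more.
String searchPath = endpoint(new String[] { "index1", "index2" }, "_search");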

View File

@ -468,9 +468,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
);
reindexRequest.setRemoteInfo(remoteInfo);
}
if (randomBoolean()) {
reindexRequest.setSourceDocTypes("doc", "tweet");
}
if (randomBoolean()) {
reindexRequest.setSourceBatchSize(randomInt(100));
}
@ -536,9 +533,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest();
updateByQueryRequest.indices(randomIndicesNames(1, 5));
Map<String, String> expectedParams = new HashMap<>();
if (randomBoolean()) {
updateByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false));
}
if (randomBoolean()) {
int batchSize = randomInt(100);
updateByQueryRequest.setBatchSize(batchSize);
@ -600,9 +594,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
Request request = RequestConverters.updateByQuery(updateByQueryRequest);
StringJoiner joiner = new StringJoiner("/", "/", "");
joiner.add(String.join(",", updateByQueryRequest.indices()));
if (updateByQueryRequest.getDocTypes().length > 0) {
joiner.add(String.join(",", updateByQueryRequest.getDocTypes()));
}
joiner.add("_update_by_query");
assertEquals(joiner.toString(), request.getEndpoint());
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
@ -614,9 +605,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
deleteByQueryRequest.indices(randomIndicesNames(1, 5));
Map<String, String> expectedParams = new HashMap<>();
if (randomBoolean()) {
deleteByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false));
}
if (randomBoolean()) {
int batchSize = randomInt(100);
deleteByQueryRequest.setBatchSize(batchSize);
@ -671,9 +659,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
Request request = RequestConverters.deleteByQuery(deleteByQueryRequest);
StringJoiner joiner = new StringJoiner("/", "/", "");
joiner.add(String.join(",", deleteByQueryRequest.indices()));
if (deleteByQueryRequest.getDocTypes().length > 0) {
joiner.add(String.join(",", deleteByQueryRequest.getDocTypes()));
}
joiner.add("_delete_by_query");
assertEquals(joiner.toString(), request.getEndpoint());
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
@ -1191,10 +1176,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
if (Strings.hasLength(index)) {
endpoint.add(index);
}
String type = String.join(",", searchRequest.types());
if (Strings.hasLength(type)) {
endpoint.add(type);
}
endpoint.add(searchEndpoint);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals(endpoint.toString(), request.getEndpoint());
@ -1204,14 +1185,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
public static SearchRequest createTestSearchRequest(String[] indices, Map<String, String> expectedParams) {
SearchRequest searchRequest = new SearchRequest(indices);
int numTypes = randomIntBetween(0, 5);
String[] types = new String[numTypes];
for (int i = 0; i < numTypes; i++) {
types[i] = "type-" + randomAlphaOfLengthBetween(2, 5);
}
searchRequest.types(types);
setRandomSearchParams(searchRequest, expectedParams);
setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);
@ -1278,7 +1251,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
public void testSearchNullIndicesAndTypes() {
expectThrows(NullPointerException.class, () -> new SearchRequest((String[]) null));
expectThrows(NullPointerException.class, () -> new SearchRequest().indices((String[]) null));
expectThrows(NullPointerException.class, () -> new SearchRequest().types((String[]) null));
}
public void testCountNotNullSource() throws IOException {
@ -1293,14 +1265,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
public void testCount() throws Exception {
String[] indices = randomIndicesNames(0, 5);
CountRequest countRequest = new CountRequest(indices);
int numTypes = randomIntBetween(0, 5);
String[] types = new String[numTypes];
for (int i = 0; i < numTypes; i++) {
types[i] = "type-" + randomAlphaOfLengthBetween(2, 5);
}
countRequest.types(types);
Map<String, String> expectedParams = new HashMap<>();
setRandomCountParams(countRequest, expectedParams);
setRandomIndicesOptions(countRequest::indicesOptions, countRequest::indicesOptions, expectedParams);
@ -1317,10 +1281,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
if (Strings.hasLength(index)) {
endpoint.add(index);
}
String type = String.join(",", types);
if (Strings.hasLength(type)) {
endpoint.add(type);
}
endpoint.add("_count");
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
assertEquals(endpoint.toString(), request.getEndpoint());
@ -1328,12 +1288,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
assertToXContentBody(countRequest, request.getEntity());
}
public void testCountNullIndicesAndTypes() {
expectThrows(NullPointerException.class, () -> new CountRequest((String[]) null));
expectThrows(NullPointerException.class, () -> new CountRequest().indices((String[]) null));
expectThrows(NullPointerException.class, () -> new CountRequest().types((String[]) null));
}
private static void setRandomCountParams(CountRequest countRequest, Map<String, String> expectedParams) {
if (randomBoolean()) {
countRequest.routing(randomAlphaOfLengthBetween(3, 10));
@ -1416,7 +1370,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
null,
null,
null,
null,
xContentRegistry(),
true,
deprecationLogger

View File

@ -88,7 +88,6 @@ import static org.opensearch.index.query.QueryBuilders.spanTermQuery;
import static org.opensearch.index.query.QueryBuilders.spanWithinQuery;
import static org.opensearch.index.query.QueryBuilders.termQuery;
import static org.opensearch.index.query.QueryBuilders.termsQuery;
import static org.opensearch.index.query.QueryBuilders.typeQuery;
import static org.opensearch.index.query.QueryBuilders.wildcardQuery;
import static org.opensearch.index.query.QueryBuilders.wrapperQuery;
import static org.opensearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
@ -447,12 +446,6 @@ public class QueryDSLDocumentationTests extends OpenSearchTestCase {
// end::terms
}
public void testType() {
// tag::type
typeQuery("my_type"); // <1>
// end::type
}
public void testWildcard() {
// tag::wildcard
wildcardQuery(

View File

@ -140,7 +140,6 @@ public class HighlighterWithAnalyzersTests extends OpenSearchIntegTestCase {
client().prepareIndex("test", "test", "1").setSource("name", "ARCOTEL Hotels Deutschland").get();
refresh();
SearchResponse search = client().prepareSearch("test")
.setTypes("test")
.setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR))
.highlighter(new HighlightBuilder().field("name.autocomplete"))
.get();

View File

@ -85,7 +85,6 @@ public class StoredExpressionIT extends OpenSearchIntegTestCase {
new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap()))
)
.setIndices("test")
.setTypes("scriptTest")
.get();
fail("search script should have been rejected");
} catch (Exception e) {

View File

@ -77,7 +77,7 @@ public class ExpressionFieldScriptTests extends OpenSearchTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
}
private FieldScript.LeafFactory compile(String expression) {

View File

@ -76,7 +76,7 @@ public class ExpressionNumberSortScriptTests extends OpenSearchTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
}
private NumberSortScript.LeafFactory compile(String expression) {

View File

@ -76,7 +76,7 @@ public class ExpressionTermsSetQueryTests extends OpenSearchTestCase {
when(fieldData.load(any())).thenReturn(atomicFieldData);
service = new ExpressionScriptEngine();
lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
}
private TermsSetQueryScript.LeafFactory compile(String expression) {

View File

@ -196,9 +196,11 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
Map<String, Object> templateParams = new HashMap<>();
templateParams.put("fieldParam", "foo");
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
new SearchRequest("test").types("type")
).setScript("testTemplate").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
.setScript("testTemplate")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
.get();
assertHitCount(searchResponse.getResponse(), 4);
assertAcked(client().admin().cluster().prepareDeleteStoredScript("testTemplate"));
@ -238,14 +240,16 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
Map<String, Object> templateParams = new HashMap<>();
templateParams.put("fieldParam", "foo");
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
new SearchRequest().indices("test").types("type")
).setScript("1a").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test"))
.setScript("1a")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
.get();
assertHitCount(searchResponse.getResponse(), 4);
expectThrows(
ResourceNotFoundException.class,
() -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test").types("type"))
() -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test"))
.setScript("1000")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
@ -253,7 +257,7 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
);
templateParams.put("fieldParam", "bar");
searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test").types("type"))
searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
.setScript("2")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
@ -304,7 +308,7 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex").types("test"))
() -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex"))
.setScript("git01")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
@ -320,9 +324,11 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
.setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(0))), XContentType.JSON)
);
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
new SearchRequest("testindex").types("test")
).setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex"))
.setScript("git01")
.setScriptType(ScriptType.STORED)
.setScriptParams(templateParams)
.get();
assertHitCount(searchResponse.getResponse(), 1);
}
}
@ -360,9 +366,11 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
String[] fieldParams = { "foo", "bar" };
arrayTemplateParams.put("fieldParam", fieldParams);
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
new SearchRequest("test").types("type")
).setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams).get();
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
.setScript("4")
.setScriptType(ScriptType.STORED)
.setScriptParams(arrayTemplateParams)
.get();
assertHitCount(searchResponse.getResponse(), 5);
}

View File

@ -33,7 +33,6 @@
package org.opensearch.script.mustache;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@ -53,9 +52,6 @@ import static org.opensearch.rest.RestRequest.Method.GET;
import static org.opensearch.rest.RestRequest.Method.POST;
public class RestMultiSearchTemplateAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchTemplateAction.class);
static final String TYPES_DEPRECATION_MESSAGE = "[types removal]"
+ " Specifying types in multi search template requests is deprecated.";
private static final Set<String> RESPONSE_PARAMS;
@ -95,14 +91,6 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler {
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
MultiSearchTemplateRequest multiRequest = parseRequest(request, allowExplicitIndex);
// Emit a single deprecation message if any search template contains types.
for (SearchTemplateRequest searchTemplateRequest : multiRequest.requests()) {
if (searchTemplateRequest.getRequest().types().length > 0) {
deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE);
break;
}
}
return channel -> client.execute(MultiSearchTemplateAction.INSTANCE, multiRequest, new RestToXContentListener<>(channel));
}

View File

@ -69,13 +69,10 @@ public class MultiSearchTemplateRequestTests extends OpenSearchTestCase {
assertThat(request.requests().get(0).getRequest().preference(), nullValue());
assertThat(request.requests().get(1).getRequest().indices()[0], equalTo("test2"));
assertThat(request.requests().get(1).getRequest().indices()[1], equalTo("test3"));
assertThat(request.requests().get(1).getRequest().types()[0], equalTo("type1"));
assertThat(request.requests().get(1).getRequest().requestCache(), nullValue());
assertThat(request.requests().get(1).getRequest().preference(), equalTo("_local"));
assertThat(request.requests().get(2).getRequest().indices()[0], equalTo("test4"));
assertThat(request.requests().get(2).getRequest().indices()[1], equalTo("test1"));
assertThat(request.requests().get(2).getRequest().types()[0], equalTo("type2"));
assertThat(request.requests().get(2).getRequest().types()[1], equalTo("type1"));
assertThat(request.requests().get(2).getRequest().routing(), equalTo("123"));
assertNotNull(request.requests().get(0).getScript());
assertNotNull(request.requests().get(1).getScript());

View File

@ -1,79 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.script.mustache;
import org.opensearch.common.bytes.BytesArray;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.rest.RestRequest;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.nio.charset.StandardCharsets;
public class RestMultiSearchTemplateActionTests extends RestActionTestCase {
@Before
public void setUpAction() {
controller().registerHandler(new RestMultiSearchTemplateAction(Settings.EMPTY));
}
public void testTypeInPath() {
String content = "{ \"index\": \"some_index\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath("/some_index/some_type/_msearch/template")
.withContent(bytesContent, XContentType.JSON)
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
dispatchRequest(request);
assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE);
}
public void testTypeInBody() {
String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/some_index/_msearch/template")
.withContent(bytesContent, XContentType.JSON)
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
dispatchRequest(request);
assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE);
}
}

View File

@ -1,71 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.script.mustache;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.search.RestSearchAction;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.util.HashMap;
import java.util.Map;
public class RestSearchTemplateActionTests extends RestActionTestCase {
@Before
public void setUpAction() {
controller().registerHandler(new RestSearchTemplateAction());
}
public void testTypeInPath() {
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath("/some_index/some_type/_search/template")
.build();
dispatchRequest(request);
assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
}
public void testTypeParameter() {
Map<String, String> params = new HashMap<>();
params.put("type", "some_type");
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath("/some_index/_search/template")
.withParams(params)
.build();
dispatchRequest(request);
assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
}
}

View File

@ -1,6 +1,6 @@
{"index":["test0", "test1"], "request_cache": true}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
{"index" : "test2,test3", "type" : "type1", "preference": "_local"}
{"index" : "test2,test3", "preference": "_local"}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
{"index" : ["test4", "test1"], "type" : [ "type2", "type1" ], "routing": "123"}
{"index" : ["test4", "test1"], "routing": "123"}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }

View File

@ -203,7 +203,7 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase
}
private SearchRequestBuilder prepareSearch() {
SearchRequestBuilder request = client().prepareSearch("test").setTypes("test");
SearchRequestBuilder request = client().prepareSearch("test");
request.addStoredField("foo.token_count");
request.addStoredField("foo.token_count_without_position_increments");
if (loadCountedFields) {

View File

@ -201,7 +201,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// TEST FETCHING _parent from child
SearchResponse searchResponse;
searchResponse = client().prepareSearch("test").setQuery(idsQuery("doc").addIds("c1")).get();
searchResponse = client().prepareSearch("test").setQuery(idsQuery().addIds("c1")).get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));

View File

@ -290,13 +290,9 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
}
public void testToQueryInnerQueryType() throws IOException {
String[] searchTypes = new String[] { TYPE };
QueryShardContext shardContext = createShardContext();
shardContext.setTypes(searchTypes);
HasChildQueryBuilder hasChildQueryBuilder = hasChildQuery(CHILD_DOC, new IdsQueryBuilder().addIds("id"), ScoreMode.None);
Query query = hasChildQueryBuilder.toQuery(shardContext);
// verify that the context types are still the same as the ones we previously set
assertThat(shardContext.getTypes(), equalTo(searchTypes));
assertLateParsingQuery(query, CHILD_DOC, "id");
}

View File

@ -192,13 +192,9 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
}
public void testToQueryInnerQueryType() throws IOException {
String[] searchTypes = new String[] { TYPE };
QueryShardContext shardContext = createShardContext();
shardContext.setTypes(searchTypes);
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_DOC, new IdsQueryBuilder().addIds("id"), false);
Query query = hasParentQueryBuilder.toQuery(shardContext);
// verify that the context types are still the same as the ones we previously set
assertThat(shardContext.getTypes(), equalTo(searchTypes));
HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_DOC, "id");
}

View File

@ -79,7 +79,6 @@ final class RemoteRequestBuilders {
// It is nasty to build paths with StringBuilder but we'll be careful....
StringBuilder path = new StringBuilder("/");
addIndices(path, searchRequest.indices());
addTypes(path, searchRequest.types());
path.append("_search");
Request request = new Request("POST", path.toString());
@ -210,16 +209,6 @@ final class RemoteRequestBuilders {
}
}
private static void addTypes(StringBuilder path, String[] types) {
if (types == null || types.length == 0) {
return;
}
for (String indexOrType : types) {
checkIndexOrType("Type", indexOrType);
}
path.append(Strings.arrayToCommaDelimitedString(types)).append('/');
}
private static void checkIndexOrType(String name, String indexOrType) {
if (indexOrType.indexOf(',') >= 0) {
throw new IllegalArgumentException(name + " containing [,] not supported but got [" + indexOrType + "]");

View File

@ -251,7 +251,7 @@ public class CancelTests extends ReindexTestCase {
assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")));
refresh("dest");
assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified);
assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
}, equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]"));
}
@ -293,7 +293,7 @@ public class CancelTests extends ReindexTestCase {
(response, total, modified) -> {
assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5)));
refresh("dest");
assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified);
assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
},
equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")
);

View File

@ -83,25 +83,25 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
client().prepareIndex("test", "test", "7").setSource("foo", "f")
);
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 7);
// Deletes two docs that matches "foo:a"
assertThat(deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().deleted(2));
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 5);
// Deletes the two first docs with limit by size
DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).size(2).refresh(true);
request.source().addSort("foo.keyword", SortOrder.ASC);
assertThat(request.get(), matcher().deleted(2));
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 3);
// Deletes but match no docs
assertThat(deleteByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().deleted(0));
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 3);
// Deletes all remaining docs
assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(3));
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 0);
}
public void testDeleteByQueryWithOneIndex() throws Exception {
@ -319,7 +319,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
client().prepareIndex("test", "test", "6").setSource("foo", "e"),
client().prepareIndex("test", "test", "7").setSource("foo", "f")
);
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 7);
int slices = randomSlices();
int expectedSlices = expectedSliceStatuses(slices, "test");
@ -329,14 +329,14 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(),
matcher().deleted(2).slices(hasSize(expectedSlices))
);
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 5);
// Delete remaining docs
assertThat(
deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(),
matcher().deleted(5).slices(hasSize(expectedSlices))
);
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 0);
}
public void testMultipleSources() throws Exception {
@ -369,7 +369,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
);
for (String index : docs.keySet()) {
assertHitCount(client().prepareSearch(index).setTypes("test").setSize(0).get(), 0);
assertHitCount(client().prepareSearch(index).setSize(0).get(), 0);
}
}

View File

@ -125,7 +125,7 @@ public class ReindexBasicTests extends ReindexTestCase {
// Use a small batch size so we have to use more than one batch
copy.source().setSize(5);
assertThat(copy.get(), matcher().created(max).batches(greaterThanOrEqualTo(max / 5)).slices(hasSize(expectedSlices)));
assertHitCount(client().prepareSearch("dest").setTypes("type").setSize(0).get(), max);
assertHitCount(client().prepareSearch("dest").setSize(0).get(), max);
// Copy some of the docs
int half = max / 2;

View File

@ -33,8 +33,6 @@
package org.opensearch.index.reindex;
import org.opensearch.common.xcontent.NamedXContentRegistry;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.search.RestSearchAction;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
@ -52,26 +50,6 @@ public class RestDeleteByQueryActionTests extends RestActionTestCase {
controller().registerHandler(action);
}
public void testTypeInPath() throws IOException {
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
.withPath("/some_index/some_type/_delete_by_query")
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
dispatchRequest(request);
// checks the type in the URL is propagated correctly to the request object
// only works after the request is dispatched, so its params are filled from url.
DeleteByQueryRequest dbqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY);
assertArrayEquals(new String[] { "some_type" }, dbqRequest.getDocTypes());
// RestDeleteByQueryAction itself doesn't check for a deprecated type usage
// checking here for a deprecation from its internal search request
assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
}
public void testParseEmpty() throws IOException {
final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build();
DeleteByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY);

View File

@ -44,7 +44,6 @@ import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import static java.util.Collections.singletonMap;
@ -103,30 +102,6 @@ public class RestReindexActionTests extends RestActionTestCase {
}
}
/**
* test deprecation is logged if one or more types are used in source search request inside reindex
*/
public void testTypeInSource() throws IOException {
FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST)
.withPath("/_reindex");
XContentBuilder b = JsonXContent.contentBuilder().startObject();
{
b.startObject("source");
{
b.field("type", randomFrom(Arrays.asList("\"t1\"", "[\"t1\", \"t2\"]", "\"_doc\"")));
}
b.endObject();
}
b.endObject();
requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON);
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
dispatchRequest(requestBuilder.build());
assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE);
}
/**
* test deprecation is logged if a type is used in the destination index request inside reindex
*/

View File

@ -33,8 +33,6 @@
package org.opensearch.index.reindex;
import org.opensearch.common.xcontent.NamedXContentRegistry;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.action.search.RestSearchAction;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
@ -53,26 +51,6 @@ public class RestUpdateByQueryActionTests extends RestActionTestCase {
controller().registerHandler(action);
}
public void testTypeInPath() throws IOException {
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
.withPath("/some_index/some_type/_update_by_query")
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
dispatchRequest(request);
// checks the type in the URL is propagated correctly to the request object
// only works after the request is dispatched, so its params are filled from url.
UpdateByQueryRequest ubqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY);
assertArrayEquals(new String[] { "some_type" }, ubqRequest.getDocTypes());
// RestUpdateByQueryAction itself doesn't check for a deprecated type usage
// checking here for a deprecation from its internal search request
assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
}
public void testParseEmpty() throws IOException {
final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build();
UpdateByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY);

View File

@ -55,7 +55,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase {
client().prepareIndex("test", "test", "3").setSource("foo", "b"),
client().prepareIndex("test", "test", "4").setSource("foo", "c")
);
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 4);
assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion());
assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion());
@ -95,7 +95,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase {
client().prepareIndex("test", "test", "3").setSource("foo", "b"),
client().prepareIndex("test", "test", "4").setSource("foo", "c")
);
assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4);
assertHitCount(client().prepareSearch("test").setSize(0).get(), 4);
assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion());
assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion());

View File

@ -78,27 +78,25 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase {
SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
assertEquals("/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("a");
searchRequest.types("b");
assertEquals("/a/b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertEquals("/a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("a", "b");
searchRequest.types("c", "d");
assertEquals("/a,b/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertEquals("/a,b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("cat,");
assertEquals("/cat%2C/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertEquals("/cat%2C/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("cat/");
assertEquals("/cat%2F/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertEquals("/cat%2F/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
searchRequest.indices("cat/", "dog");
assertEquals("/cat%2F,dog/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertEquals("/cat%2F,dog/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
// test a specific date math + all characters that need escaping.
searchRequest.indices("<cat{now/d}>", "<>/{}|+:,");
assertEquals(
"/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/c,d/_search",
"/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/_search",
initialSearch(searchRequest, query, remoteVersion).getEndpoint()
);
// pass-through if already escaped.
searchRequest.indices("%2f", "%3a");
assertEquals("/%2f,%3a/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertEquals("/%2f,%3a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
assertWarnings(DEPRECATED_URL_ENCODED_INDEX_WARNING);
@ -107,20 +105,6 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase {
expectBadStartRequest(searchRequest, "Index", ",", "%2fcat,");
searchRequest.indices("%3ccat/");
expectBadStartRequest(searchRequest, "Index", "/", "%3ccat/");
searchRequest.indices("ok");
searchRequest.types("cat,");
expectBadStartRequest(searchRequest, "Type", ",", "cat,");
searchRequest.types("cat,", "dog");
expectBadStartRequest(searchRequest, "Type", ",", "cat,");
searchRequest.types("dog", "cat,");
expectBadStartRequest(searchRequest, "Type", ",", "cat,");
searchRequest.types("cat/");
expectBadStartRequest(searchRequest, "Type", "/", "cat/");
searchRequest.types("cat/", "dog");
expectBadStartRequest(searchRequest, "Type", "/", "cat/");
searchRequest.types("dog", "cat/");
expectBadStartRequest(searchRequest, "Type", "/", "cat/");
}
private void expectBadStartRequest(SearchRequest searchRequest, String type, String bad, String failed) {

View File

@ -99,7 +99,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
// searching for either of the terms should return both results since they collate to the same value
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@ -143,7 +142,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
// using sort mode = max, values B and C will be used for the sort
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", "a"))
@ -159,7 +157,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
// same thing, using different sort mode that will use a for both docs
request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", "a"))
@ -207,7 +204,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
// searching for either of the terms should return both results since they collate to the same value
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@ -253,7 +249,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
);
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@ -300,7 +295,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
);
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@ -348,7 +342,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
);
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.ASC) // secondary sort
// should kick in on
@ -391,7 +384,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
);
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
SearchResponse response = client().search(request).actionGet();
@ -434,7 +426,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
);
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.DESC));
SearchResponse response = client().search(request).actionGet();
@ -472,7 +463,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
);
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
SearchResponse response = client().search(request).actionGet();
@ -522,7 +512,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
);
SearchRequest request = new SearchRequest().indices(index)
.types(type)
.source(
new SearchSourceBuilder().fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))

View File

@ -27,27 +27,6 @@
"description":"A comma-separated list of indices to restrict the results"
}
}
},
{
"path":"/{index}/{type}/_count",
"methods":[
"POST",
"GET"
],
"parts":{
"index":{
"type":"list",
"description":"A comma-separated list of indices to restrict the results"
},
"type": {
"type" : "list",
"description" : "A comma-separated list of types to restrict the results"
}
},
"deprecated": {
"version" : "7.0.0",
"description" : "Specifying types in urls has been deprecated"
}
}
]
},
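
A hedged sketch of the corresponding client-side call after this spec change (CountRequest is the class exercised in RequestConvertersTests above; the import path is assumed): count requests now resolve to "/{index}/_count" only.

import org.opensearch.client.core.CountRequest;   // package assumed for the high-level REST client

// Was: new CountRequest("my-index").types("my-type") -> "/my-index/my-type/_count"
// Now: indices only -> "/my-index/_count"
CountRequest countRequest = new CountRequest("my-index");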

View File

@ -1,61 +0,0 @@
setup:
- do:
indices.create:
index: test
- do:
index:
index: test
id: 1
body: { foo: bar }
- do:
indices.refresh:
index: [test]
---
"count with body":
- do:
count:
index: test
body:
query:
match:
foo: bar
- match: {count : 1}
- do:
count:
index: test
body:
query:
match:
foo: test
- match: {count : 0}
---
"count with empty body":
# empty body should default to match_all query
- do:
count:
index: test
body: { }
- match: {count : 1}
- do:
count:
index: test
- match: {count : 1}
---
"count body without query element":
- do:
catch: bad_request
count:
index: test
body:
match:
foo: bar

View File

@ -379,14 +379,12 @@ public class TasksIT extends OpenSearchIntegTestCase {
headers.put(Task.X_OPAQUE_ID, "my_id");
headers.put("Foo-Header", "bar");
headers.put("Custom-Task-Header", "my_value");
assertSearchResponse(
client().filterWithHeader(headers).prepareSearch("test").setTypes("doc").setQuery(QueryBuilders.matchAllQuery()).get()
);
assertSearchResponse(client().filterWithHeader(headers).prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).get());
// the search operation should produce one main task
List<TaskInfo> mainTask = findEvents(SearchAction.NAME, Tuple::v1);
assertEquals(1, mainTask.size());
assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], types[doc], search_type["));
assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], search_type["));
assertThat(mainTask.get(0).getDescription(), containsString("\"query\":{\"match_all\""));
assertTaskHeaders(mainTask.get(0));
@ -829,14 +827,12 @@ public class TasksIT extends OpenSearchIntegTestCase {
assertNoFailures(client().admin().indices().prepareRefresh(TaskResultsService.TASK_INDEX).get());
SearchResponse searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
.setTypes(TaskResultsService.TASK_TYPE)
.setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.getAction())))
.get();
assertEquals(1L, searchResponse.getHits().getTotalHits().value);
searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
.setTypes(TaskResultsService.TASK_TYPE)
.setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.getTaskId().getNodeId())))
.get();

View File

@ -159,11 +159,7 @@ public class BulkProcessorRetryIT extends OpenSearchIntegTestCase {
client().admin().indices().refresh(new RefreshRequest()).get();
SearchResponse results = client().prepareSearch(INDEX_NAME)
.setTypes(TYPE_NAME)
.setQuery(QueryBuilders.matchAllQuery())
.setSize(0)
.get();
SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
if (rejectedExecutionExpected) {
assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps));

View File

@ -99,7 +99,7 @@ public class IndexActionIT extends OpenSearchIntegTestCase {
}
try {
logger.debug("running search with a specific type");
SearchResponse response = client().prepareSearch("test").setTypes("type").get();
SearchResponse response = client().prepareSearch("test").get();
if (response.getHits().getTotalHits().value != numOfDocs) {
final String message = "Count is "
+ response.getHits().getTotalHits().value

View File

@ -331,7 +331,7 @@ public class OpenCloseIndexIT extends OpenSearchIntegTestCase {
// check the index still contains the records that we indexed
client().admin().indices().prepareOpen("test").execute().get();
ensureGreen();
SearchResponse searchResponse = client().prepareSearch().setTypes("type").setQuery(QueryBuilders.matchQuery("test", "init")).get();
SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, docs);
}

View File

@ -99,7 +99,6 @@ public class BooleanTermsIT extends OpenSearchIntegTestCase {
public void testSingleValueField() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())))
.get();
@ -132,7 +131,6 @@ public class BooleanTermsIT extends OpenSearchIntegTestCase {
public void testMultiValueField() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())))
.get();
@ -165,7 +163,6 @@ public class BooleanTermsIT extends OpenSearchIntegTestCase {
public void testUnmapped() throws Exception {
SearchResponse response = client().prepareSearch("idx_unmapped")
.setTypes("type")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME).size(between(1, 5)).collectMode(randomFrom(SubAggCollectionMode.values()))
)

View File

@ -1788,7 +1788,6 @@ public class DateHistogramIT extends OpenSearchIntegTestCase {
private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new);
SearchResponse response = client().prepareSearch("sort_idx")
.setTypes("type")
.addAggregation(
dateHistogram("histo").field("date")
.dateHistogramInterval(DateHistogramInterval.DAY)

View File

@ -121,7 +121,6 @@ public class DiversifiedSamplerIT extends OpenSearchIntegTestCase {
// statement
boolean asc = randomBoolean();
SearchResponse response = client().prepareSearch("test")
.setTypes("book")
.setSearchType(SearchType.QUERY_THEN_FETCH)
.addAggregation(
terms("genres").field("genre")

View File

@ -938,7 +938,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
private void assertMultiSortResponse(double[] expectedKeys, BucketOrder... order) {
SearchResponse response = client().prepareSearch("sort_idx")
.setTypes("multi_sort_type")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))

View File

@ -1391,7 +1391,6 @@ public class HistogramIT extends OpenSearchIntegTestCase {
private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) {
SearchResponse response = client().prepareSearch("sort_idx")
.setTypes("type")
.addAggregation(
histogram("histo").field(SINGLE_VALUED_FIELD_NAME)
.interval(1)

View File

@ -886,7 +886,6 @@ public class LongTermsIT extends AbstractTermsTestCase {
private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) {
SearchResponse response = client().prepareSearch("sort_idx")
.setTypes("multi_sort_type")
.addAggregation(
terms("terms").field(SINGLE_VALUED_FIELD_NAME)
.collectMode(randomFrom(SubAggCollectionMode.values()))

View File

@ -332,7 +332,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
private void testMinDocCountOnTerms(String field, Script script, BucketOrder order, String include, boolean retry) throws Exception {
// all terms
final SearchResponse allTermsResponse = client().prepareSearch("idx")
.setTypes("type")
.setSize(0)
.setQuery(QUERY)
.addAggregation(
@ -352,7 +351,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
for (long minDocCount = 0; minDocCount < 20; ++minDocCount) {
final int size = randomIntBetween(1, cardinality + 2);
final SearchRequest request = client().prepareSearch("idx")
.setTypes("type")
.setSize(0)
.setQuery(QUERY)
.addAggregation(
@ -407,7 +405,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
private void testMinDocCountOnHistogram(BucketOrder order) throws Exception {
final int interval = randomIntBetween(1, 3);
final SearchResponse allResponse = client().prepareSearch("idx")
.setTypes("type")
.setSize(0)
.setQuery(QUERY)
.addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(0))
@ -417,7 +414,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
for (long minDocCount = 0; minDocCount < 50; ++minDocCount) {
final SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setSize(0)
.setQuery(QUERY)
.addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(minDocCount))
@ -428,7 +424,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception {
final SearchResponse allResponse = client().prepareSearch("idx")
.setTypes("type")
.setSize(0)
.setQuery(QUERY)
.addAggregation(
@ -440,7 +435,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
for (long minDocCount = 0; minDocCount < 50; ++minDocCount) {
final SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setSize(0)
.setQuery(QUERY)
.addAggregation(

View File

@ -478,7 +478,6 @@ public class NestedIT extends OpenSearchIntegTestCase {
indexRandom(true, indexRequests);
SearchResponse response = client().prepareSearch("idx2")
.setTypes("provider")
.addAggregation(
terms("startDate").field("dates.month.start")
.subAggregation(
@ -586,7 +585,6 @@ public class NestedIT extends OpenSearchIntegTestCase {
refresh();
SearchResponse response = client().prepareSearch("idx4")
.setTypes("product")
.addAggregation(
terms("category").field("categories")
.subAggregation(nested("property", "property").subAggregation(terms("property_id").field("property.id")))

View File

@ -120,7 +120,6 @@ public class SamplerIT extends OpenSearchIntegTestCase {
// statement
boolean asc = randomBoolean();
SearchResponse response = client().prepareSearch("test")
.setTypes("book")
.setSearchType(SearchType.QUERY_THEN_FETCH)
.addAggregation(
terms("genres").field("genre")

View File

@ -51,7 +51,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))
@ -76,7 +75,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key")
@ -106,7 +104,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key")
@ -136,7 +133,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setRouting(routing1)
.setQuery(matchAllQuery())
.addAggregation(
@ -166,7 +162,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))
@ -191,7 +186,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))
@ -216,7 +210,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key")
@ -245,7 +238,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key")
@ -275,7 +267,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setRouting(routing1)
.setQuery(matchAllQuery())
.addAggregation(
@ -305,7 +296,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))
@ -330,7 +320,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))
@ -355,7 +344,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key")
@ -384,7 +372,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key")
@ -413,7 +400,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setRouting(routing1)
.setQuery(matchAllQuery())
.addAggregation(
@ -443,7 +429,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
indexData();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.setQuery(matchAllQuery())
.addAggregation(
terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))

View File

@ -148,11 +148,9 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
// Use significant_text on text fields but occasionally run with alternative of
// significant_terms on legacy fieldData=true too.
request = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD)));
} else {
request = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD)));
}
@ -245,13 +243,11 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
SearchRequestBuilder request;
if (randomBoolean()) {
request = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(
terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD).minDocCount(1))
);
} else {
request = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD).minDocCount(1)));
}
@ -282,7 +278,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
SearchRequestBuilder request1;
if (useSigText) {
request1 = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(
terms("class").field(CLASS_FIELD)
.subAggregation(
@ -292,7 +287,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
);
} else {
request1 = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(
terms("class").field(CLASS_FIELD)
.subAggregation(
@ -309,7 +303,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
SearchRequestBuilder request2;
if (useSigText) {
request2 = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(
filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation(
significantText("sig_terms", TEXT_FIELD).minDocCount(1)
@ -326,7 +319,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
);
} else {
request2 = client().prepareSearch(INDEX_NAME)
.setTypes(DOC_TYPE)
.addAggregation(
filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation(
significantTerms("sig_terms").field(TEXT_FIELD)

View File

@ -303,7 +303,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -317,7 +316,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -337,7 +335,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -351,7 +348,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -372,7 +368,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int shardSize = randomIntBetween(size, size * 2);
SearchResponse testResponse = client().prepareSearch("idx_with_routing")
.setTypes("type")
.setRouting(String.valueOf(between(1, numRoutingValues)))
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
@ -393,7 +388,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -408,7 +402,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -429,7 +422,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -444,7 +436,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -465,7 +456,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -480,7 +470,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -501,7 +490,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -517,7 +505,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -539,7 +526,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -555,7 +541,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)
@ -577,7 +562,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -591,7 +575,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -611,7 +594,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -625,7 +607,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -646,7 +627,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int shardSize = randomIntBetween(size, size * 2);
SearchResponse testResponse = client().prepareSearch("idx_with_routing")
.setTypes("type")
.setRouting(String.valueOf(between(1, numRoutingValues)))
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
@ -667,7 +647,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -682,7 +661,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -703,7 +681,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -718,7 +695,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -739,7 +715,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -754,7 +729,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -775,7 +749,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -791,7 +764,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -813,7 +785,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -829,7 +800,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(LONG_FIELD_NAME)
@ -851,7 +821,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -865,7 +834,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -885,7 +853,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -899,7 +866,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -920,7 +886,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int shardSize = randomIntBetween(size, size * 2);
SearchResponse testResponse = client().prepareSearch("idx_with_routing")
.setTypes("type")
.setRouting(String.valueOf(between(1, numRoutingValues)))
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
@ -941,7 +906,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -956,7 +920,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -977,7 +940,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -992,7 +954,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -1013,7 +974,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -1028,7 +988,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -1049,7 +1008,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -1065,7 +1023,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -1087,7 +1044,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
int size = randomIntBetween(1, 20);
int shardSize = randomIntBetween(size, size * 2);
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -1103,7 +1059,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
assertSearchResponse(accurateResponse);
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(DOUBLE_FIELD_NAME)
@ -1128,7 +1083,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
*/
public void testFixedDocs() throws Exception {
SearchResponse response = client().prepareSearch("idx_fixed_docs_0", "idx_fixed_docs_1", "idx_fixed_docs_2")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(STRING_FIELD_NAME)

View File

@ -368,7 +368,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
private void runTestFieldWithPartitionedFiltering(String field) throws Exception {
// Find total number of unique terms
SearchResponse allResponse = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(terms("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values())))
.get();
assertSearchResponse(allResponse);
@ -382,7 +381,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
Set<String> foundTerms = new HashSet<>();
for (int partition = 0; partition < numPartitions; partition++) {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").field(field)
.includeExclude(new IncludeExclude(partition, numPartitions))
@ -402,7 +400,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldWithValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -428,7 +425,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(MULTI_VALUED_FIELD_NAME)
@ -452,7 +448,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testMultiValuedScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.script(
@ -488,7 +483,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testMultiValuedFieldWithValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(MULTI_VALUED_FIELD_NAME)
@ -537,7 +531,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
);
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script)
)
@ -567,7 +560,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
);
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script)
)
@ -590,7 +582,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testScriptMultiValued() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
.executionHint(randomExecutionHint())
@ -626,7 +617,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testPartiallyUnmapped() throws Exception {
SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -652,7 +642,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testStringTermsNestedIntoPerBucketAggregator() throws Exception {
// no execution hint so that the logic that decides whether or not to use ordinals is executed
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation(
terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
@ -681,7 +670,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
boolean asc = true;
try {
client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -710,7 +698,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception {
boolean asc = randomBoolean();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("tags").executionHint(randomExecutionHint())
.field("tag")
@ -749,7 +736,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception {
boolean asc = randomBoolean();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("tags").executionHint(randomExecutionHint())
.field("tag")
@ -813,7 +799,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
String statsName = statsNameBuilder.toString();
boolean asc = randomBoolean();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("tags").executionHint(randomExecutionHint())
.field("tag")
@ -877,7 +862,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
String statsName = statsNameBuilder.toString();
boolean asc = randomBoolean();
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("tags").executionHint(randomExecutionHint())
.field("tag")
@ -936,7 +920,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
for (String index : Arrays.asList("idx", "idx_unmapped")) {
try {
client().prepareSearch(index)
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -957,7 +940,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
for (String index : Arrays.asList("idx", "idx_unmapped")) {
try {
client().prepareSearch(index)
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -982,7 +964,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
for (String index : Arrays.asList("idx", "idx_unmapped")) {
try {
SearchResponse response = client().prepareSearch(index)
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -1008,7 +989,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
for (String index : Arrays.asList("idx", "idx_unmapped")) {
try {
client().prepareSearch(index)
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -1033,7 +1013,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception {
boolean asc = true;
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -1066,7 +1045,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception {
boolean asc = false;
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -1100,7 +1078,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception {
boolean asc = true;
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -1134,7 +1111,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testSingleValuedFieldOrderedByStatsAggAscWithTermsSubAgg() throws Exception {
boolean asc = true;
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").executionHint(randomExecutionHint())
.field(SINGLE_VALUED_FIELD_NAME)
@ -1253,7 +1229,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
public void testIndexMetaField() throws Exception {
SearchResponse response = client().prepareSearch("idx", "empty_bucket_idx")
.setTypes("type")
.addAggregation(
terms("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
.executionHint(randomExecutionHint())

View File

@ -204,7 +204,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testUnmapped() throws Exception {
SearchResponse response = client().prepareSearch("idx_unmapped")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value"))
.get();
@ -218,7 +217,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testPartiallyUnmapped() throws Exception {
SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value"))
.get();
@ -232,7 +230,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testSingleValuedString() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value"))
.get();
@ -246,7 +243,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testSingleValuedNumeric() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()))
.get();
@ -289,7 +285,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testSingleValuedNumericHashed() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()))
.get();
@ -303,7 +298,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testMultiValuedString() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values"))
.get();
@ -317,7 +311,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testMultiValuedNumeric() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(false)))
.get();
@ -331,7 +324,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testMultiValuedNumericHashed() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(true)))
.get();
@ -345,7 +337,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testSingleValuedStringScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
cardinality("cardinality").precisionThreshold(precisionThreshold)
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_value'].value", emptyMap()))
@ -362,7 +353,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testMultiValuedStringScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
cardinality("cardinality").precisionThreshold(precisionThreshold)
.script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_values']", emptyMap()))
@ -380,7 +370,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testSingleValuedNumericScript() throws Exception {
Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['" + singleNumericField() + "'].value", emptyMap());
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script))
.get();
@ -400,7 +389,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
Collections.emptyMap()
);
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script))
.get();
@ -414,7 +402,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testSingleValuedStringValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
cardinality("cardinality").precisionThreshold(precisionThreshold)
.field("str_value")
@ -432,7 +419,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testMultiValuedStringValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
cardinality("cardinality").precisionThreshold(precisionThreshold)
.field("str_values")
@ -450,7 +436,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testSingleValuedNumericValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
cardinality("cardinality").precisionThreshold(precisionThreshold)
.field(singleNumericField())
@ -468,7 +453,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testMultiValuedNumericValueScript() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
cardinality("cardinality").precisionThreshold(precisionThreshold)
.field(multiNumericField(false))
@ -486,7 +470,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
public void testAsSubAgg() throws Exception {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
terms("terms").field("str_value")
.collectMode(randomFrom(SubAggCollectionMode.values()))

View File

@ -241,7 +241,6 @@ public class SerialDiffIT extends OpenSearchIntegTestCase {
public void testBasicDiff() {
SearchResponse response = client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD)
.interval(interval)
@ -286,7 +285,6 @@ public class SerialDiffIT extends OpenSearchIntegTestCase {
public void testInvalidLagSize() {
try {
client().prepareSearch("idx")
.setTypes("type")
.addAggregation(
histogram("histo").field(INTERVAL_FIELD)
.interval(interval)

View File

@ -192,7 +192,6 @@ public class SearchWithRandomIOExceptionsIT extends OpenSearchIntegTestCase {
int expectedResults = added[docToQuery] ? 1 : 0;
logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery));
SearchResponse searchResponse = client().prepareSearch()
.setTypes("type")
.setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery)))
.setSize(expectedResults)
.get();
@ -202,7 +201,6 @@ public class SearchWithRandomIOExceptionsIT extends OpenSearchIntegTestCase {
}
// check match all
searchResponse = client().prepareSearch()
.setTypes("type")
.setQuery(QueryBuilders.matchAllQuery())
.setSize(numCreated + numInitialDocs)
.addSort("_uid", SortOrder.ASC)
@ -239,10 +237,7 @@ public class SearchWithRandomIOExceptionsIT extends OpenSearchIntegTestCase {
);
client().admin().indices().prepareOpen("test").execute().get();
ensureGreen();
SearchResponse searchResponse = client().prepareSearch()
.setTypes("type")
.setQuery(QueryBuilders.matchQuery("test", "init"))
.get();
SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get();
assertNoFailures(searchResponse);
assertHitCount(searchResponse, numInitialDocs);
}

View File

@ -71,7 +71,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {
public void testThatCustomHighlightersAreSupported() throws IOException {
SearchResponse searchResponse = client().prepareSearch("test")
.setTypes("test")
.setQuery(QueryBuilders.matchAllQuery())
.highlighter(new HighlightBuilder().field("name").highlighterType("test-custom"))
.get();
@ -86,7 +85,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {
highlightConfig.options(options);
SearchResponse searchResponse = client().prepareSearch("test")
.setTypes("test")
.setQuery(QueryBuilders.matchAllQuery())
.highlighter(new HighlightBuilder().field(highlightConfig))
.get();
@ -100,7 +98,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {
options.put("myGlobalOption", "someValue");
SearchResponse searchResponse = client().prepareSearch("test")
.setTypes("test")
.setQuery(QueryBuilders.matchAllQuery())
.highlighter(new HighlightBuilder().field("name").highlighterType("test-custom").options(options))
.get();
@ -111,7 +108,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {
public void testThatCustomHighlighterReceivesFieldsInOrder() throws Exception {
SearchResponse searchResponse = client().prepareSearch("test")
.setTypes("test")
.setQuery(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).should(QueryBuilders.termQuery("name", "arbitrary")))
.highlighter(
new HighlightBuilder().highlighterType("test-custom")

View File

@ -2171,7 +2171,7 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
index("test", "type1", "2", "text", new String[] { "", text2 });
refresh();
IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("2");
IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery().addIds("2");
field.highlighterType("plain");
response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get();
assertNotHighlighted(response, 0, "text");
@ -2188,7 +2188,7 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
// But if the field was actually empty then you should get no highlighting field
index("test", "type1", "3", "text", new String[] {});
refresh();
idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("3");
idsQueryBuilder = QueryBuilders.idsQuery().addIds("3");
field.highlighterType("plain");
response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get();
assertNotHighlighted(response, 0, "text");
@ -2205,7 +2205,7 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
index("test", "type1", "4");
refresh();
idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("4");
idsQueryBuilder = QueryBuilders.idsQuery().addIds("4");
field.highlighterType("plain");
response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get();
assertNotHighlighted(response, 0, "text");
@ -3042,7 +3042,6 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
for (String highlighter : ALL_TYPES) {
SearchResponse response = client().prepareSearch("test")
.setTypes("typename")
.setQuery(matchQuery("foo", "test"))
.highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false))
.get();
@ -3071,7 +3070,6 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
for (String highlighter : ALL_TYPES) {
SearchResponse response = client().prepareSearch("filtered_alias")
.setTypes("typename")
.setQuery(matchQuery("foo", "test"))
.highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false))
.get();

View File

@ -737,11 +737,7 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
.setRefreshPolicy(IMMEDIATE)
.get();
SearchResponse searchResponse = client().prepareSearch("my-index")
.setTypes("my-type1")
.addStoredField("field1")
.addStoredField("_routing")
.get();
SearchResponse searchResponse = client().prepareSearch("my-index").addStoredField("field1").addStoredField("_routing").get();
assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue());
@ -755,7 +751,7 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
.get();
assertFailures(
client().prepareSearch("my-index").setTypes("my-type1").addStoredField("field1"),
client().prepareSearch("my-index").addStoredField("field1"),
RestStatus.BAD_REQUEST,
containsString("field [field1] isn't a leaf field")
);
@ -838,7 +834,6 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
indexRandom(true, client().prepareIndex("test", "type", "1").setSource("test_field", "foobar"));
refresh();
SearchResponse searchResponse = client().prepareSearch("test")
.setTypes("type")
.setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).docValueField("test_field"))
.get();
assertHitCount(searchResponse, 1);

View File

@ -220,7 +220,6 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
String name = "TestPosition";
search.setQuery(QueryBuilders.matchAllQuery())
.setTypes("type1")
.addAggregation(
AggregationBuilders.geoDistance(name, new GeoPoint(tgt_lat, tgt_lon))
.field("location")

View File

@ -640,7 +640,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.include(true)
.minTermFreq(1)
.minDocFreq(1);
SearchResponse mltResponse = client().prepareSearch().setTypes("type1").setQuery(queryBuilder).get();
SearchResponse mltResponse = client().prepareSearch().setQuery(queryBuilder).get();
assertHitCount(mltResponse, 3L);
}
@ -672,7 +672,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.minDocFreq(1)
.maxQueryTerms(max_query_terms)
.minimumShouldMatch("0%");
SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, max_query_terms);
}
@ -705,7 +705,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.minDocFreq(1)
.minimumShouldMatch(minimumShouldMatch);
logger.info("Testing with minimum_should_match = {}", minimumShouldMatch);
SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
if (minimumShouldMatch.equals("0%")) {
assertHitCount(response, 10);
@ -735,7 +735,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.minDocFreq(0)
.maxQueryTerms(100)
.minimumShouldMatch("100%"); // strict all terms must match!
SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, 1);
}
@ -809,7 +809,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.minDocFreq(0)
.maxQueryTerms(100)
.minimumShouldMatch("0%");
SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, numFields);
@ -824,7 +824,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.include(true)
.minimumShouldMatch("0%");
response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, numFields - (i + 1));
}
@ -848,7 +848,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.minDocFreq(0)
.include(true)
.minimumShouldMatch("1%");
SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, 2);
@ -856,7 +856,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
.minDocFreq(0)
.include(true)
.minimumShouldMatch("1%");
response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
response = client().prepareSearch("test").setQuery(mltQuery).get();
assertSearchResponse(response);
assertHitCount(response, 1);
}

View File

@ -548,7 +548,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
refresh();
SearchResponse searchResponse = client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC).setNestedPath("nested1"))
.get();
@ -562,7 +561,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("4"));
searchResponse = client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC).setNestedPath("nested1"))
.get();
@ -658,7 +656,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
refresh();
SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.fieldSort("nested1.field1")
@ -683,7 +680,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("10"));
searchRequestBuilder = client().prepareSearch("test")
.setTypes("type1")
.setQuery(QueryBuilders.matchAllQuery())
.addSort(
SortBuilders.fieldSort("nested1.field1")

View File

@ -516,7 +516,6 @@ public class QueryProfilerIT extends OpenSearchIntegTestCase {
SearchResponse resp = client().prepareSearch()
.setQuery(q)
.setIndices("test")
.setTypes("type1")
.setProfile(true)
.setSearchType(SearchType.QUERY_THEN_FETCH)
.get();

View File

@ -45,7 +45,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException;
import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.search.SearchType;
import org.opensearch.bootstrap.JavaVersion;
import org.opensearch.common.Strings;
import org.opensearch.common.document.DocumentField;
import org.opensearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause;
import org.opensearch.common.regex.Regex;
@ -124,7 +123,6 @@ import static org.opensearch.index.query.QueryBuilders.spanTermQuery;
import static org.opensearch.index.query.QueryBuilders.termQuery;
import static org.opensearch.index.query.QueryBuilders.termsLookupQuery;
import static org.opensearch.index.query.QueryBuilders.termsQuery;
import static org.opensearch.index.query.QueryBuilders.typeQuery;
import static org.opensearch.index.query.QueryBuilders.wildcardQuery;
import static org.opensearch.index.query.QueryBuilders.wrapperQuery;
import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@ -557,23 +555,6 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
assertHitCount(searchResponse, 0L);
}
public void testTypeFilter() throws Exception {
assertAcked(prepareCreate("test"));
indexRandom(
true,
client().prepareIndex("test", "type1", "1").setSource("field1", "value1"),
client().prepareIndex("test", "type1", "2").setSource("field1", "value1")
);
assertHitCount(client().prepareSearch().setQuery(typeQuery("type1")).get(), 2L);
assertHitCount(client().prepareSearch().setQuery(typeQuery("type2")).get(), 0L);
assertHitCount(client().prepareSearch().setTypes("type1").setQuery(matchAllQuery()).get(), 2L);
assertHitCount(client().prepareSearch().setTypes("type2").setQuery(matchAllQuery()).get(), 0L);
assertHitCount(client().prepareSearch().setTypes("type1", "type2").setQuery(matchAllQuery()).get(), 2L);
}
public void testIdsQueryTestsIdIndexed() throws Exception {
assertAcked(client().admin().indices().prepareCreate("test"));
@ -584,29 +565,19 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
client().prepareIndex("test", "type1", "3").setSource("field1", "value3")
);
SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1").addIds("1", "3"))).get();
SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
// no type
searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("1", "3")).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
// no type
searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "3")).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("7", "10")).get();
searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("7", "10")).get();
assertHitCount(searchResponse, 0L);
// repeat..., with terms
searchResponse = client().prepareSearch().setTypes("type1").setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get();
searchResponse = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get();
assertHitCount(searchResponse, 2L);
assertSearchHits(searchResponse, "1", "3");
}
@ -1298,7 +1269,7 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
client().prepareIndex("test", "_doc", "3").setSource("field1", "value3").get();
refresh();
SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery("_doc").addIds("1", "2")).get();
SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get();
assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().getHits().length, equalTo(2));
@ -1310,11 +1281,11 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
assertHitCount(searchResponse, 2L);
assertThat(searchResponse.getHits().getHits().length, equalTo(2));
searchResponse = client().prepareSearch().setQuery(idsQuery(Strings.EMPTY_ARRAY).addIds("1")).get();
searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1")).get();
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getHits().length, equalTo(1));
searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2", "_doc").addIds("1", "2", "3", "4")).get();
searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2", "3", "4")).get();
assertHitCount(searchResponse, 3L);
assertThat(searchResponse.getHits().getHits().length, equalTo(3));
}

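For orientation, a minimal sketch (not part of the change) of the typeless forms the removed type-filter test and the typed ids queries migrate to; the index "test" and field "field1" mirror the fixtures set up in the surrounding test, and filtering on an ordinary field stands in for the old type filter.

import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.Client;
import org.opensearch.index.query.QueryBuilders;

// Illustrative only: typeless replacements for the removed typed variants.
class TypelessQueryExample {
    static void run(Client client) {
        // idsQuery("type1") becomes a plain idsQuery()
        SearchResponse byId = client.prepareSearch("test")
            .setQuery(QueryBuilders.idsQuery().addIds("1", "3"))
            .get();

        // instead of restricting by type, filter on a regular document field
        SearchResponse byField = client.prepareSearch("test")
            .setQuery(QueryBuilders.termQuery("field1", "value1"))
            .get();
    }
}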
View File

@ -264,7 +264,7 @@ public class SimpleQueryStringIT extends OpenSearchIntegTestCase {
assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setTypes("type1").setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get();
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get();
assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
@ -272,7 +272,7 @@ public class SimpleQueryStringIT extends OpenSearchIntegTestCase {
assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
searchResponse = client().prepareSearch().setTypes("type1").setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get();
searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get();
assertHitCount(searchResponse, 1L);
assertSearchHits(searchResponse, "1");
}

View File

@ -543,7 +543,6 @@ public class SearchScrollIT extends OpenSearchIntegTestCase {
refresh();
SearchResponse response = client().prepareSearch("test")
.setTypes("test")
.addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last"))
.setScroll("1m")
.get();
@ -556,7 +555,6 @@ public class SearchScrollIT extends OpenSearchIntegTestCase {
assertNoSearchHits(response);
response = client().prepareSearch("test")
.setTypes("test")
.addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first"))
.setScroll("1m")
.get();

View File

@ -1031,7 +1031,7 @@ public class CompletionSuggestSearchIT extends OpenSearchIntegTestCase {
SearchPhaseExecutionException e = expectThrows(
SearchPhaseExecutionException.class,
() -> client().prepareSearch(INDEX).setTypes(TYPE).addSort(new FieldSortBuilder(FIELD)).get()
() -> client().prepareSearch(INDEX).addSort(new FieldSortBuilder(FIELD)).get()
);
assertThat(e.status().getStatus(), is(400));
assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]"));

View File

@ -222,7 +222,6 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<
String error = null;
ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(
request.shardId(),
request.types(),
request.nowInMillis(),
request.filteringAliases()
);

View File

@ -43,7 +43,6 @@ import org.opensearch.cluster.ClusterState;
import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
import org.opensearch.cluster.routing.ShardIterator;
import org.opensearch.cluster.service.ClusterService;
import org.opensearch.common.Strings;
import org.opensearch.common.inject.Inject;
import org.opensearch.common.io.stream.Writeable;
import org.opensearch.common.lease.Releasables;
@ -51,7 +50,6 @@ import org.opensearch.index.IndexService;
import org.opensearch.index.engine.Engine;
import org.opensearch.index.get.GetResult;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.Uid;
import org.opensearch.index.shard.IndexShard;
import org.opensearch.index.shard.ShardId;
@ -136,13 +134,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe
@Override
protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId) throws IOException {
String[] types;
if (MapperService.SINGLE_MAPPING_NAME.equals(request.type())) { // typeless explain call
types = Strings.EMPTY_ARRAY;
} else {
types = new String[] { request.type() };
}
ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shardId, types, request.nowInMillis, request.filteringAlias());
ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shardId, request.nowInMillis, request.filteringAlias());
SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT);
Engine.GetResult result = null;
try {

View File

@ -195,7 +195,7 @@ public class TransportFieldCapabilitiesIndexAction extends HandledTransportActio
return true;
}
assert req.nowInMillis() != 0L;
ShardSearchRequest searchRequest = new ShardSearchRequest(req.shardId(), null, req.nowInMillis(), AliasFilter.EMPTY);
ShardSearchRequest searchRequest = new ShardSearchRequest(req.shardId(), req.nowInMillis(), AliasFilter.EMPTY);
searchRequest.source(new SearchSourceBuilder().query(req.indexFilter()));
return searchService.canMatch(searchRequest).canMatch();
}

View File

@ -192,7 +192,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
CheckedBiConsumer<SearchRequest, XContentParser, IOException> consumer,
String[] indices,
IndicesOptions indicesOptions,
String[] types,
String routing,
String searchType,
Boolean ccsMinimizeRoundtrips,
@ -225,9 +224,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
if (indicesOptions != null) {
searchRequest.indicesOptions(indicesOptions);
}
if (types != null && types.length > 0) {
searchRequest.types(types);
}
if (routing != null) {
searchRequest.routing(routing);
}
@ -256,8 +252,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
throw new IllegalArgumentException("explicit index in multi search is not allowed");
}
searchRequest.indices(nodeStringArrayValue(value));
} else if ("type".equals(entry.getKey()) || "types".equals(entry.getKey())) {
searchRequest.types(nodeStringArrayValue(value));
} else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) {
searchRequest.searchType(nodeStringValue(value, null));
} else if ("ccs_minimize_roundtrips".equals(entry.getKey()) || "ccsMinimizeRoundtrips".equals(entry.getKey())) {
@ -359,9 +353,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
xContentBuilder.field("ignore_unavailable", request.indicesOptions().ignoreUnavailable());
xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices());
}
if (request.types() != null) {
xContentBuilder.field("types", request.types());
}
if (request.searchType() != null) {
xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT));
}
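A short sketch of what a typeless multi-search looks like from the Java API after this change, assuming hypothetical index and field names; the per-request "types" header simply disappears and each sub-request carries only indices and a body.

import org.opensearch.action.search.MultiSearchRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

class TypelessMsearchExample {
    // Each sub-request is scoped by index and query only; no type metadata is attached.
    static MultiSearchRequest build() {
        MultiSearchRequest msearch = new MultiSearchRequest();
        msearch.add(new SearchRequest("logs-1")        // hypothetical index
            .source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery())));
        msearch.add(new SearchRequest("logs-2")        // hypothetical index
            .source(new SearchSourceBuilder().query(QueryBuilders.termQuery("level", "error"))));
        return msearch;
    }
}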

View File

@ -106,8 +106,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
private Integer preFilterShardSize;
private String[] types = Strings.EMPTY_ARRAY;
private boolean ccsMinimizeRoundtrips = true;
public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.strictExpandOpenAndForbidClosedIgnoreThrottled();
@ -204,7 +202,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
this.scroll = searchRequest.scroll;
this.searchType = searchRequest.searchType;
this.source = searchRequest.source;
this.types = searchRequest.types;
this.localClusterAlias = localClusterAlias;
this.absoluteStartMillis = absoluteStartMillis;
this.finalReduce = finalReduce;
@ -225,7 +222,15 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
preference = in.readOptionalString();
scroll = in.readOptionalWriteable(Scroll::new);
source = in.readOptionalWriteable(SearchSourceBuilder::new);
types = in.readStringArray();
if (in.getVersion().before(Version.V_2_0_0)) {
// types no longer relevant so ignore
String[] types = in.readStringArray();
if (types.length > 0) {
throw new IllegalStateException(
"types are no longer supported in search requests but found [" + Arrays.toString(types) + "]"
);
}
}
indicesOptions = IndicesOptions.readIndicesOptions(in);
requestCache = in.readOptionalBoolean();
batchedReduceSize = in.readVInt();
@ -262,7 +267,10 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
out.writeOptionalString(preference);
out.writeOptionalWriteable(scroll);
out.writeOptionalWriteable(source);
out.writeStringArray(types);
if (out.getVersion().before(Version.V_2_0_0)) {
// types not supported so send an empty array to previous versions
out.writeStringArray(Strings.EMPTY_ARRAY);
}
indicesOptions.writeIndicesOptions(out);
out.writeOptionalBoolean(requestCache);
out.writeVInt(batchedReduceSize);
@ -408,35 +416,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
this.ccsMinimizeRoundtrips = ccsMinimizeRoundtrips;
}
/**
* The document types to execute the search against. Defaults to be executed against
* all types.
*
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public String[] types() {
return types;
}
/**
* The document types to execute the search against. Defaults to be executed against
* all types.
*
* @deprecated Types are in the process of being removed. Instead of using a type, prefer to
* filter on a field on the document.
*/
@Deprecated
public SearchRequest types(String... types) {
Objects.requireNonNull(types, "types must not be null");
for (String type : types) {
Objects.requireNonNull(type, "type must not be null");
}
this.types = types;
return this;
}
/**
* A comma separated list of routing values to control the shards the search will be executed on.
*/
@ -702,9 +681,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
sb.append("indices[");
Strings.arrayToDelimitedString(indices, ",", sb);
sb.append("], ");
sb.append("types[");
Strings.arrayToDelimitedString(types, ",", sb);
sb.append("], ");
sb.append("search_type[").append(searchType).append("], ");
if (scroll != null) {
sb.append("scroll[").append(scroll.keepAlive()).append("], ");
@ -733,7 +709,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
&& Objects.equals(source, that.source)
&& Objects.equals(requestCache, that.requestCache)
&& Objects.equals(scroll, that.scroll)
&& Arrays.equals(types, that.types)
&& Objects.equals(batchedReduceSize, that.batchedReduceSize)
&& Objects.equals(maxConcurrentShardRequests, that.maxConcurrentShardRequests)
&& Objects.equals(preFilterShardSize, that.preFilterShardSize)
@ -755,7 +730,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
source,
requestCache,
scroll,
Arrays.hashCode(types),
indicesOptions,
batchedReduceSize,
maxConcurrentShardRequests,
@ -777,8 +751,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
+ Arrays.toString(indices)
+ ", indicesOptions="
+ indicesOptions
+ ", types="
+ Arrays.toString(types)
+ ", routing='"
+ routing
+ '\''
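The version-gated read/write above is the standard wire-compatibility pattern for a removed field: pre-2.0.0 nodes still expect a types array on the stream, so readers skip it (rejecting non-empty values) and writers emit an empty array. A condensed sketch of that pattern, with illustrative class and method names:

import java.io.IOException;
import java.util.Arrays;
import org.opensearch.Version;
import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;

class TypesWireCompat {
    // Older peers still serialize a types array; consume it and reject any non-empty value.
    static void readLegacyTypes(StreamInput in) throws IOException {
        if (in.getVersion().before(Version.V_2_0_0)) {
            String[] types = in.readStringArray();
            if (types.length > 0) {
                throw new IllegalStateException("types are no longer supported but found " + Arrays.toString(types));
            }
        }
    }

    // Older peers still expect a types array; send an empty one so the stream stays aligned.
    static void writeLegacyTypes(StreamOutput out) throws IOException {
        if (out.getVersion().before(Version.V_2_0_0)) {
            out.writeStringArray(Strings.EMPTY_ARRAY);
        }
    }
}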

View File

@ -72,17 +72,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
return this;
}
/**
* The document types to execute the search against. Defaults to be executed against
* all types.
* @deprecated Types are going away, prefer filtering on a field.
*/
@Deprecated
public SearchRequestBuilder setTypes(String... types) {
request.types(types);
return this;
}
/**
* The search type to execute, defaults to {@link SearchType#DEFAULT}.
*/

View File

@ -624,8 +624,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
}
/**
* Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
* {@link QueryShardContext#setTypes(String...)}.
* Creates a new QueryShardContext.
*
* Passing a {@code null} {@link IndexSearcher} will return a valid context, however it won't be able to make
* {@link IndexReader}-specific optimizations, such as rewriting containing range queries.

View File

@ -47,7 +47,6 @@ import org.opensearch.search.internal.SearchContext;
import org.opensearch.tasks.Task;
import java.nio.charset.Charset;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
@ -220,8 +219,6 @@ public final class SearchSlowLog implements SearchOperationListener {
} else {
messageFields.put("total_hits", "-1");
}
String[] types = context.getQueryShardContext().getTypes();
messageFields.put("types", escapeJson(asJsonArray(types != null ? Arrays.stream(types) : Stream.empty())));
messageFields.put(
"stats",
escapeJson(asJsonArray(context.groupStats() != null ? context.groupStats().stream() : Stream.empty()))
@ -259,13 +256,6 @@ public final class SearchSlowLog implements SearchOperationListener {
sb.append("-1");
}
sb.append("], ");
if (context.getQueryShardContext().getTypes() == null) {
sb.append("types[], ");
} else {
sb.append("types[");
Strings.arrayToDelimitedString(context.getQueryShardContext().getTypes(), ",", sb);
sb.append("], ");
}
if (context.groupStats() == null) {
sb.append("stats[], ");
} else {

View File

@ -32,9 +32,8 @@
package org.opensearch.index.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.opensearch.cluster.metadata.Metadata;
import org.opensearch.Version;
import org.opensearch.common.ParseField;
import org.opensearch.common.ParsingException;
import org.opensearch.common.Strings;
@ -44,14 +43,12 @@ import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.ObjectParser;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.index.mapper.MappedFieldType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Objects;
@ -72,8 +69,6 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
private final Set<String> ids = new HashSet<>();
private String[] types = Strings.EMPTY_ARRAY;
/**
* Creates a new IdsQueryBuilder with no types specified upfront
*/
@ -86,38 +81,23 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
*/
public IdsQueryBuilder(StreamInput in) throws IOException {
super(in);
types = in.readStringArray();
if (in.getVersion().before(Version.V_2_0_0)) {
// types no longer relevant so ignore
String[] types = in.readStringArray();
if (types.length > 0) {
throw new IllegalStateException("types are no longer supported in ids query but found [" + Arrays.toString(types) + "]");
}
}
Collections.addAll(ids, in.readStringArray());
}
@Override
protected void doWriteTo(StreamOutput out) throws IOException {
out.writeStringArray(types);
out.writeStringArray(ids.toArray(new String[ids.size()]));
}
/**
* Add types to query
*
* @deprecated Types are in the process of being removed, prefer to filter on a field instead.
*/
@Deprecated
public IdsQueryBuilder types(String... types) {
if (types == null) {
throw new IllegalArgumentException("[" + NAME + "] types cannot be null");
if (out.getVersion().before(Version.V_2_0_0)) {
// types not supported so send an empty array to previous versions
out.writeStringArray(Strings.EMPTY_ARRAY);
}
this.types = types;
return this;
}
/**
* Returns the types used in this query
*
* @deprecated Types are in the process of being removed, prefer to filter on a field instead.
*/
@Deprecated
public String[] types() {
return this.types;
out.writeStringArray(ids.toArray(new String[ids.size()]));
}
/**
@ -141,9 +121,6 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
if (types.length > 0) {
builder.array(TYPE_FIELD.getPreferredName(), types);
}
builder.startArray(VALUES_FIELD.getPreferredName());
for (String value : ids) {
builder.value(value);
@ -156,18 +133,13 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
private static final ObjectParser<IdsQueryBuilder, Void> PARSER = new ObjectParser<>(NAME, IdsQueryBuilder::new);
static {
PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::types), IdsQueryBuilder.TYPE_FIELD);
PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::addIds), IdsQueryBuilder.VALUES_FIELD);
declareStandardFields(PARSER);
}
public static IdsQueryBuilder fromXContent(XContentParser parser) {
try {
IdsQueryBuilder builder = PARSER.apply(parser, null);
if (builder.types().length > 0) {
deprecationLogger.deprecate("ids_query_with_types", TYPES_DEPRECATION_MESSAGE);
}
return builder;
return PARSER.apply(parser, null);
} catch (IllegalArgumentException e) {
throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e);
}
@ -193,34 +165,20 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
MappedFieldType idField = context.fieldMapper(IdFieldMapper.NAME);
MappedFieldType idField = context.getFieldType(IdFieldMapper.NAME);
if (idField == null || ids.isEmpty()) {
throw new IllegalStateException("Rewrite first");
}
final DocumentMapper mapper = context.getMapperService().documentMapper();
Collection<String> typesForQuery;
if (types.length == 0) {
typesForQuery = context.queryTypes();
} else if (types.length == 1 && Metadata.ALL.equals(types[0])) {
typesForQuery = Collections.singleton(mapper.type());
} else {
typesForQuery = new HashSet<>(Arrays.asList(types));
}
if (typesForQuery.contains(mapper.type())) {
return idField.termsQuery(new ArrayList<>(ids), context);
} else {
return new MatchNoDocsQuery("Type mismatch");
}
return idField.termsQuery(new ArrayList<>(ids), context);
}
@Override
protected int doHashCode() {
return Objects.hash(ids, Arrays.hashCode(types));
return Objects.hash(ids);
}
@Override
protected boolean doEquals(IdsQueryBuilder other) {
return Objects.equals(ids, other.ids) && Arrays.equals(types, other.types);
return Objects.equals(ids, other.ids);
}
}
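A minimal sketch of the surviving ids query API (the id values are hypothetical); the removed types(...) narrowing is simply dropped and the query matches on _id alone.

import org.opensearch.index.query.IdsQueryBuilder;
import org.opensearch.index.query.QueryBuilders;

class TypelessIdsQueryExample {
    // Build an ids query that matches documents by _id only.
    static IdsQueryBuilder build() {
        return QueryBuilders.idsQuery().addIds("1", "100", "4");
    }
}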

View File

@ -153,18 +153,6 @@ public final class QueryBuilders {
return new IdsQueryBuilder();
}
/**
* Constructs a query that will match only specific ids within types.
*
* @param types The mapping/doc type
*
* @deprecated Types are in the process of being removed, use {@link #idsQuery()} instead.
*/
@Deprecated
public static IdsQueryBuilder idsQuery(String... types) {
return new IdsQueryBuilder().types(types);
}
/**
* A Query that matches documents containing a term.
*
@ -622,15 +610,6 @@ public final class QueryBuilders {
return new WrapperQueryBuilder(source);
}
/**
* A filter based on doc/mapping type.
* @deprecated Types are going away, prefer filtering on a field.
*/
@Deprecated
public static TypeQueryBuilder typeQuery(String type) {
return new TypeQueryBuilder(type);
}
/**
* A terms query that can extract the terms from another doc in an index.
*/
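Callers of the removed typeQuery helper are expected to filter on an ordinary document field instead. A hedged sketch, assuming a hypothetical doc_kind discriminator field that stands in for whatever the index uses in place of mapping types:

import org.opensearch.index.query.BoolQueryBuilder;
import org.opensearch.index.query.QueryBuilders;

class TypeFilterReplacementExample {
    // Replace the old type filter with a term filter on a regular field.
    static BoolQueryBuilder build() {
        return QueryBuilders.boolQuery()
            .filter(QueryBuilders.termQuery("doc_kind", "book"))   // hypothetical discriminator field
            .must(QueryBuilders.matchQuery("title", "search"));    // hypothetical query clause
    }
}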

View File

@ -44,7 +44,6 @@ import org.opensearch.action.ActionListener;
import org.opensearch.client.Client;
import org.opensearch.common.CheckedFunction;
import org.opensearch.common.ParsingException;
import org.opensearch.common.Strings;
import org.opensearch.common.TriFunction;
import org.opensearch.common.io.stream.NamedWriteableRegistry;
import org.opensearch.common.lucene.search.Queries;
@ -76,9 +75,6 @@ import org.opensearch.search.lookup.SearchLookup;
import org.opensearch.transport.RemoteClusterAware;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@ -105,7 +101,6 @@ public class QueryShardContext extends QueryRewriteContext {
private final TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> indexFieldDataService;
private final int shardId;
private final IndexSearcher searcher;
private String[] types = Strings.EMPTY_ARRAY;
private boolean cacheable = true;
private final SetOnce<Boolean> frozen = new SetOnce<>();
@ -113,14 +108,6 @@ public class QueryShardContext extends QueryRewriteContext {
private final Predicate<String> indexNameMatcher;
private final BooleanSupplier allowExpensiveQueries;
public void setTypes(String... types) {
this.types = types;
}
public String[] getTypes() {
return types;
}
private final Map<String, Query> namedQueries = new HashMap<>();
private boolean allowUnmappedFields;
private boolean mapUnmappedFieldAsString;
@ -377,18 +364,6 @@ public class QueryShardContext extends QueryRewriteContext {
}
}
/**
* Returns the narrowed down explicit types, or, if not set, all types.
*/
public Collection<String> queryTypes() {
String[] types = getTypes();
if (types == null || types.length == 0 || (types.length == 1 && types[0].equals("_all"))) {
DocumentMapper mapper = getMapperService().documentMapper();
return mapper == null ? Collections.emptyList() : Collections.singleton(mapper.type());
}
return Arrays.asList(types);
}
private SearchLookup lookup = null;
/**
@ -398,8 +373,7 @@ public class QueryShardContext extends QueryRewriteContext {
if (this.lookup == null) {
this.lookup = new SearchLookup(
getMapperService(),
(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup),
types
(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup)
);
}
return this.lookup;
@ -415,8 +389,7 @@ public class QueryShardContext extends QueryRewriteContext {
*/
return new SearchLookup(
getMapperService(),
(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup),
types
(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup)
);
}

View File

@ -503,9 +503,6 @@ public abstract class AbstractBulkByScrollRequest<Self extends AbstractBulkByScr
} else {
b.append("[all indices]");
}
if (searchRequest.types() != null && searchRequest.types().length != 0) {
b.append(Arrays.toString(searchRequest.types()));
}
}
@Override

View File

@ -91,9 +91,8 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
public void doStart(RejectAwareActionListener<Response> searchListener) {
if (logger.isDebugEnabled()) {
logger.debug(
"executing initial scroll against {}{}",
isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices(),
isEmpty(firstSearchRequest.types()) ? "" : firstSearchRequest.types()
"executing initial scroll against {}",
isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices()
);
}
client.search(firstSearchRequest, wrapListener(searchListener));

View File

@ -102,19 +102,6 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest<DeleteByQu
return this;
}
/**
* Set the document types for the delete
* @deprecated Types are in the process of being removed. Instead of
* using a type, prefer to filter on a field of the document.
*/
@Deprecated
public DeleteByQueryRequest setDocTypes(String... types) {
if (types != null) {
getSearchRequest().types(types);
}
return this;
}
/**
* Set routing limiting the process to the shards that match that routing value
*/
@ -155,21 +142,6 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest<DeleteByQu
return getSearchRequest().routing();
}
/**
* Gets the document types on which this request would be executed. Returns an empty array if all
* types are to be processed.
* @deprecated Types are in the process of being removed. Instead of
* using a type, prefer to filter on a field of the document.
*/
@Deprecated
public String[] getDocTypes() {
if (getSearchRequest().types() != null) {
return getSearchRequest().types();
} else {
return new String[0];
}
}
@Override
protected DeleteByQueryRequest self() {
return this;
@ -223,29 +195,6 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest<DeleteByQu
return getSearchRequest().indicesOptions();
}
/**
* Gets the document types on which this request would be executed.
* @deprecated Types are in the process of being removed. Instead of
* using a type, prefer to filter on a field of the document.
*/
@Deprecated
public String[] types() {
assert getSearchRequest() != null;
return getSearchRequest().types();
}
/**
* Set the document types for the delete
* @deprecated Types are in the process of being removed. Instead of
* using a type, prefer to filter on a field of the document.
*/
@Deprecated
public DeleteByQueryRequest types(String... types) {
assert getSearchRequest() != null;
getSearchRequest().types(types);
return this;
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
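A minimal sketch of a typeless delete-by-query, assuming a hypothetical index and field; the removed setDocTypes/types calls are simply omitted and the request is scoped by index and query only.

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.reindex.DeleteByQueryRequest;

class TypelessDeleteByQueryExample {
    // Scope the deletion with an index name and a query; no document types are involved.
    static DeleteByQueryRequest build() {
        return new DeleteByQueryRequest("my-index")                    // hypothetical index name
            .setQuery(QueryBuilders.termQuery("user", "kimchy"));      // hypothetical field and value
    }
}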

View File

@ -167,16 +167,6 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
return this;
}
/**
* Set the document types which need to be copied from the source indices
*/
public ReindexRequest setSourceDocTypes(String... docTypes) {
if (docTypes != null) {
this.getSearchRequest().types(docTypes);
}
return this;
}
/**
* Sets the scroll size for setting how many documents are to be processed in one batch during reindex
*/
@ -330,10 +320,6 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
builder.rawField("query", remoteInfo.getQuery().streamInput(), RemoteInfo.QUERY_CONTENT_TYPE.type());
}
builder.array("index", getSearchRequest().indices());
String[] types = getSearchRequest().types();
if (types.length > 0) {
builder.array("type", types);
}
getSearchRequest().source().innerToXContent(builder, params);
builder.endObject();
}
@ -383,11 +369,6 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
if (indices != null) {
request.getSearchRequest().indices(indices);
}
String[] types = extractStringArray(source, "type");
if (types != null) {
deprecationLogger.deprecate("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
request.getSearchRequest().types(types);
}
request.setRemoteInfo(buildRemoteInfo(source));
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.map(source);

View File

@ -98,19 +98,6 @@ public class UpdateByQueryRequest extends AbstractBulkIndexByScrollRequest<Updat
return this;
}
/**
* Set the document types for the update
* @deprecated Types are in the process of being removed. Instead of
* using a type, prefer to filter on a field of the document.
*/
@Deprecated
public UpdateByQueryRequest setDocTypes(String... types) {
if (types != null) {
getSearchRequest().types(types);
}
return this;
}
/**
* Set routing limiting the process to the shards that match that routing value
*/
@ -151,21 +138,6 @@ public class UpdateByQueryRequest extends AbstractBulkIndexByScrollRequest<Updat
return getSearchRequest().routing();
}
/**
* Gets the document types on which this request would be executed. Returns an empty array if all
* types are to be processed.
* @deprecated Types are in the process of being removed. Instead of
* using a type, prefer to filter on a field of the document.
*/
@Deprecated
public String[] getDocTypes() {
if (getSearchRequest().types() != null) {
return getSearchRequest().types();
} else {
return new String[0];
}
}
/**
* Ingest pipeline to set on index requests made by this action.
*/

View File

@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse;
import org.opensearch.action.support.IndicesOptions;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.Strings;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.index.query.QueryBuilder;
import org.opensearch.rest.BaseRestHandler;
@ -59,8 +58,6 @@ import static org.opensearch.rest.action.RestActions.buildBroadcastShardsHeader;
import static org.opensearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER;
public class RestCountAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCountAction.class);
static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in count requests is deprecated.";
@Override
public List<Route> routes() {
@ -104,11 +101,6 @@ public class RestCountAction extends BaseRestHandler {
searchSourceBuilder.minScore(minScore);
}
if (request.hasParam("type")) {
deprecationLogger.deprecate("count_with_types", TYPES_DEPRECATION_MESSAGE);
countRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
}
countRequest.preference(request.param("preference"));
final int terminateAfter = request.paramAsInt("terminate_after", DEFAULT_TERMINATE_AFTER);

View File

@ -68,7 +68,6 @@ import static org.opensearch.rest.RestRequest.Method.POST;
public class RestMultiSearchAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchAction.class);
static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in multi search requests is deprecated.";
private static final Set<String> RESPONSE_PARAMS;
@ -108,13 +107,6 @@ public class RestMultiSearchAction extends BaseRestHandler {
@Override
public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
final MultiSearchRequest multiSearchRequest = parseRequest(request, client.getNamedWriteableRegistry(), allowExplicitIndex);
// Emit a single deprecation message if any search request contains types.
for (SearchRequest searchRequest : multiSearchRequest.requests()) {
if (searchRequest.types().length > 0) {
deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE);
break;
}
}
return channel -> {
final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel());
cancellableClient.execute(MultiSearchAction.INSTANCE, multiSearchRequest, new RestToXContentListener<>(channel));
@ -192,7 +184,6 @@ public class RestMultiSearchAction extends BaseRestHandler {
) throws IOException {
String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
String[] types = Strings.splitStringByCommaToArray(request.param("type"));
String searchType = request.param("search_type");
boolean ccsMinimizeRoundtrips = request.paramAsBoolean("ccs_minimize_roundtrips", true);
String routing = request.param("routing");
@ -206,7 +197,6 @@ public class RestMultiSearchAction extends BaseRestHandler {
consumer,
indices,
indicesOptions,
types,
routing,
searchType,
ccsMinimizeRoundtrips,

View File

@ -199,10 +199,6 @@ public class RestSearchAction extends BaseRestHandler {
searchRequest.scroll(new Scroll(parseTimeValue(scroll, null, "scroll")));
}
if (request.hasParam("type")) {
deprecationLogger.deprecate("search_with_types", TYPES_DEPRECATION_MESSAGE);
searchRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
}
searchRequest.routing(request.param("routing"));
searchRequest.preference(request.param("preference"));
searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions()));

View File

@ -55,7 +55,6 @@ import org.opensearch.index.engine.Engine;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.index.mapper.ObjectMapper;
import org.opensearch.index.mapper.TypeFieldMapper;
import org.opensearch.index.query.AbstractQueryBuilder;
import org.opensearch.index.query.ParsedQuery;
import org.opensearch.index.query.QueryBuilder;
@ -209,7 +208,6 @@ final class DefaultSearchContext extends SearchContext {
request::nowInMillis,
shardTarget.getClusterAlias()
);
queryShardContext.setTypes(request.types());
queryBoost = request.indexBoost();
this.lowLevelCancellation = lowLevelCancellation;
}
@ -321,11 +319,6 @@ final class DefaultSearchContext extends SearchContext {
@Override
public Query buildFilteredQuery(Query query) {
List<Query> filters = new ArrayList<>();
Query typeFilter = createTypeFilter(queryShardContext.getTypes());
if (typeFilter != null) {
filters.add(typeFilter);
}
if (mapperService().hasNested()
&& new NestedHelper(mapperService()).mightMatchNestedDocs(query)
&& (aliasFilter == null || new NestedHelper(mapperService()).mightMatchNestedDocs(aliasFilter))) {
@ -357,17 +350,6 @@ final class DefaultSearchContext extends SearchContext {
}
}
private Query createTypeFilter(String[] types) {
if (types != null && types.length >= 1) {
if (mapperService().documentMapper() == null) {
return null;
}
TypeFieldMapper.TypeFieldType ft = new TypeFieldMapper.TypeFieldType(mapperService().documentMapper().type());
return ft.typeFilter(types);
}
return null;
}
@Override
public ShardSearchContextId id() {
return readerContext.id();

View File

@ -71,6 +71,7 @@ import org.opensearch.tasks.TaskId;
import org.opensearch.transport.TransportRequest;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.function.Function;
@ -87,7 +88,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
private final int numberOfShards;
private final SearchType searchType;
private final Scroll scroll;
private final String[] types;
private final float indexBoost;
private final Boolean requestCache;
private final long nowInMillis;
@ -152,7 +152,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
numberOfShards,
searchRequest.searchType(),
searchRequest.source(),
searchRequest.types(),
searchRequest.requestCache(),
aliasFilter,
indexBoost,
@ -170,14 +169,13 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
assert searchRequest.allowPartialSearchResults() != null;
}
public ShardSearchRequest(ShardId shardId, String[] types, long nowInMillis, AliasFilter aliasFilter) {
public ShardSearchRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFilter) {
this(
OriginalIndices.NONE,
shardId,
-1,
SearchType.QUERY_THEN_FETCH,
null,
types,
null,
aliasFilter,
1.0f,
@ -198,7 +196,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
int numberOfShards,
SearchType searchType,
SearchSourceBuilder source,
String[] types,
Boolean requestCache,
AliasFilter aliasFilter,
float indexBoost,
@ -215,7 +212,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
this.numberOfShards = numberOfShards;
this.searchType = searchType;
this.source = source;
this.types = types;
this.requestCache = requestCache;
this.aliasFilter = aliasFilter;
this.indexBoost = indexBoost;
@ -240,7 +236,13 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
numberOfShards = in.readVInt();
scroll = in.readOptionalWriteable(Scroll::new);
source = in.readOptionalWriteable(SearchSourceBuilder::new);
types = in.readStringArray();
if (in.getVersion().before(Version.V_2_0_0)) {
// types no longer relevant so ignore
String[] types = in.readStringArray();
if (types.length > 0) {
throw new IllegalStateException("types are no longer supported in ids query but found [" + Arrays.toString(types) + "]");
}
}
aliasFilter = new AliasFilter(in);
indexBoost = in.readFloat();
nowInMillis = in.readVLong();
@ -281,7 +283,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
this.numberOfShards = clone.numberOfShards;
this.scroll = clone.scroll;
this.source = clone.source;
this.types = clone.types;
this.aliasFilter = clone.aliasFilter;
this.indexBoost = clone.indexBoost;
this.nowInMillis = clone.nowInMillis;
@ -314,7 +315,10 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
}
out.writeOptionalWriteable(scroll);
out.writeOptionalWriteable(source);
out.writeStringArray(types);
if (out.getVersion().before(Version.V_2_0_0)) {
// types not supported so send an empty array to previous versions
out.writeStringArray(Strings.EMPTY_ARRAY);
}
aliasFilter.writeTo(out);
out.writeFloat(indexBoost);
if (asKey == false) {
@ -363,10 +367,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
return shardId;
}
public String[] types() {
return types;
}
public SearchSourceBuilder source() {
return source;
}
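A sketch of the trimmed ShardSearchRequest constructor as internal callers now use it; apart from the removed types array, the arguments are unchanged.

import org.opensearch.index.shard.ShardId;
import org.opensearch.search.internal.AliasFilter;
import org.opensearch.search.internal.ShardSearchRequest;

class ShardSearchRequestExample {
    // Build a local shard-level request from the shard, the request time and the alias filter.
    static ShardSearchRequest build(ShardId shardId, long nowInMillis) {
        return new ShardSearchRequest(shardId, nowInMillis, AliasFilter.EMPTY);
    }
}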

View File

@ -32,7 +32,6 @@
package org.opensearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.opensearch.common.Nullable;
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.MapperService;
@ -43,13 +42,10 @@ public class DocLookup {
private final MapperService mapperService;
private final Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup;
@Nullable
private final String[] types;
DocLookup(MapperService mapperService, Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup, @Nullable String[] types) {
DocLookup(MapperService mapperService, Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup) {
this.mapperService = mapperService;
this.fieldDataLookup = fieldDataLookup;
this.types = types;
}
public MapperService mapperService() {
@ -61,10 +57,6 @@ public class DocLookup {
}
public LeafDocLookup getLeafDocLookup(LeafReaderContext context) {
return new LeafDocLookup(mapperService, fieldDataLookup, types, context);
}
public String[] getTypes() {
return types;
return new LeafDocLookup(mapperService, fieldDataLookup, context);
}
}

View File

@ -32,22 +32,18 @@
package org.opensearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.opensearch.common.Nullable;
import org.opensearch.index.mapper.MapperService;
public class FieldsLookup {
private final MapperService mapperService;
@Nullable
private final String[] types;
FieldsLookup(MapperService mapperService, @Nullable String[] types) {
FieldsLookup(MapperService mapperService) {
this.mapperService = mapperService;
this.types = types;
}
public LeafFieldsLookup getLeafFieldsLookup(LeafReaderContext context) {
return new LeafFieldsLookup(mapperService, types, context.reader());
return new LeafFieldsLookup(mapperService, context.reader());
}
}

View File

@ -33,7 +33,6 @@ package org.opensearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.opensearch.ExceptionsHelper;
import org.opensearch.common.Nullable;
import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.fielddata.ScriptDocValues;
@ -43,7 +42,6 @@ import org.opensearch.index.mapper.MapperService;
import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
@ -60,23 +58,13 @@ public class LeafDocLookup implements Map<String, ScriptDocValues<?>> {
private final MapperService mapperService;
private final Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup;
@Nullable
private final String[] types;
private final LeafReaderContext reader;
private int docId = -1;
LeafDocLookup(
MapperService mapperService,
Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup,
@Nullable String[] types,
LeafReaderContext reader
) {
LeafDocLookup(MapperService mapperService, Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup, LeafReaderContext reader) {
this.mapperService = mapperService;
this.fieldDataLookup = fieldDataLookup;
this.types = types;
this.reader = reader;
}
@ -100,9 +88,7 @@ public class LeafDocLookup implements Map<String, ScriptDocValues<?>> {
if (scriptValues == null) {
final MappedFieldType fieldType = mapperService.fieldType(fieldName);
if (fieldType == null) {
throw new IllegalArgumentException(
"No field found for [" + fieldName + "] in mapping with types " + Arrays.toString(types)
);
throw new IllegalArgumentException("No field found for [" + fieldName + "] in mapping");
}
// load fielddata on behalf of the script: otherwise it would need additional permissions
// to deal with pagedbytes/ramusagestimator/etc

View File

@ -33,7 +33,6 @@ package org.opensearch.search.lookup;
import org.apache.lucene.index.LeafReader;
import org.opensearch.OpenSearchParseException;
import org.opensearch.common.Nullable;
import org.opensearch.index.fieldvisitor.SingleFieldsVisitor;
import org.opensearch.index.mapper.DocumentMapper;
import org.opensearch.index.mapper.MappedFieldType;
@ -42,7 +41,6 @@ import org.opensearch.index.mapper.TypeFieldMapper;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
@ -54,19 +52,14 @@ import static java.util.Collections.singletonMap;
public class LeafFieldsLookup implements Map {
private final MapperService mapperService;
@Nullable
private final String[] types;
private final LeafReader reader;
private int docId = -1;
private final Map<String, FieldLookup> cachedFieldData = new HashMap<>();
LeafFieldsLookup(MapperService mapperService, @Nullable String[] types, LeafReader reader) {
LeafFieldsLookup(MapperService mapperService, LeafReader reader) {
this.mapperService = mapperService;
this.types = types;
this.reader = reader;
}
@ -148,7 +141,7 @@ public class LeafFieldsLookup implements Map {
if (data == null) {
MappedFieldType fieldType = mapperService.fieldType(name);
if (fieldType == null) {
throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types " + Arrays.toString(types));
throw new IllegalArgumentException("No field found for [" + name + "] in mapping");
}
data = new FieldLookup(fieldType);
cachedFieldData.put(name, data);

View File

@ -33,7 +33,6 @@
package org.opensearch.search.lookup;
import org.apache.lucene.index.LeafReaderContext;
import org.opensearch.common.Nullable;
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.MapperService;
@ -75,17 +74,15 @@ public class SearchLookup {
*/
public SearchLookup(
MapperService mapperService,
BiFunction<MappedFieldType, Supplier<SearchLookup>, IndexFieldData<?>> fieldDataLookup,
@Nullable String[] types
BiFunction<MappedFieldType, Supplier<SearchLookup>, IndexFieldData<?>> fieldDataLookup
) {
this.fieldChain = Collections.emptySet();
docMap = new DocLookup(
mapperService,
fieldType -> fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())),
types
fieldType -> fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name()))
);
sourceLookup = new SourceLookup();
fieldsLookup = new FieldsLookup(mapperService, types);
fieldsLookup = new FieldsLookup(mapperService);
this.fieldDataLookup = fieldDataLookup;
}
@ -100,8 +97,7 @@ public class SearchLookup {
this.fieldChain = Collections.unmodifiableSet(fieldChain);
this.docMap = new DocLookup(
searchLookup.docMap.mapperService(),
fieldType -> searchLookup.fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())),
searchLookup.docMap.getTypes()
fieldType -> searchLookup.fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name()))
);
this.sourceLookup = searchLookup.sourceLookup;
this.fieldsLookup = searchLookup.fieldsLookup;
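A sketch of the two-argument SearchLookup construction that remains after the types parameter is removed; the lookup is created from the mapper service and a field-data resolver alone.

import java.util.function.BiFunction;
import java.util.function.Supplier;
import org.opensearch.index.fielddata.IndexFieldData;
import org.opensearch.index.mapper.MappedFieldType;
import org.opensearch.index.mapper.MapperService;
import org.opensearch.search.lookup.SearchLookup;

class SearchLookupExample {
    // Construct a lookup without the dropped types array.
    static SearchLookup build(MapperService mapperService,
                              BiFunction<MappedFieldType, Supplier<SearchLookup>, IndexFieldData<?>> fieldDataLookup) {
        return new SearchLookup(mapperService, fieldDataLookup);
    }
}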

View File

@ -109,7 +109,6 @@ public class ExpandSearchPhaseTests extends OpenSearchTestCase {
assertThat(groupBuilder.must(), Matchers.contains(QueryBuilders.termQuery("foo", "bar")));
}
assertArrayEquals(mockSearchPhaseContext.getRequest().indices(), searchRequest.indices());
assertArrayEquals(mockSearchPhaseContext.getRequest().types(), searchRequest.types());
List<MultiSearchResponse.Item> mSearchResponses = new ArrayList<>(numInnerHits);
for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {

View File

@ -88,13 +88,11 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
request.requests().get(0).indicesOptions(),
equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
);
assertThat(request.requests().get(0).types().length, equalTo(0));
assertThat(request.requests().get(1).indices()[0], equalTo("test"));
assertThat(
request.requests().get(1).indicesOptions(),
equalTo(IndicesOptions.fromOptions(false, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
);
assertThat(request.requests().get(1).types()[0], equalTo("type1"));
assertThat(request.requests().get(2).indices()[0], equalTo("test"));
assertThat(
request.requests().get(2).indicesOptions(),
@ -112,12 +110,9 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
);
assertThat(request.requests().get(5).indices(), is(Strings.EMPTY_ARRAY));
assertThat(request.requests().get(5).types().length, equalTo(0));
assertThat(request.requests().get(6).indices(), is(Strings.EMPTY_ARRAY));
assertThat(request.requests().get(6).types().length, equalTo(0));
assertThat(request.requests().get(6).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH));
assertThat(request.requests().get(7).indices(), is(Strings.EMPTY_ARRAY));
assertThat(request.requests().get(7).types().length, equalTo(0));
}
public void testFailWithUnknownKey() {
@ -148,7 +143,6 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
request.requests().get(0).indicesOptions(),
equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
);
assertThat(request.requests().get(0).types().length, equalTo(0));
}
public void testCancelAfterIntervalAtParentAndFewChildRequest() throws Exception {
@ -197,23 +191,17 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
request.requests().get(0).indicesOptions(),
equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
);
assertThat(request.requests().get(0).types().length, equalTo(0));
}
public void testSimpleAdd2() throws Exception {
MultiSearchRequest request = parseMultiSearchRequestFromFile("/org/opensearch/action/search/simple-msearch2.json");
assertThat(request.requests().size(), equalTo(5));
assertThat(request.requests().get(0).indices()[0], equalTo("test"));
assertThat(request.requests().get(0).types().length, equalTo(0));
assertThat(request.requests().get(1).indices()[0], equalTo("test"));
assertThat(request.requests().get(1).types()[0], equalTo("type1"));
assertThat(request.requests().get(2).indices(), is(Strings.EMPTY_ARRAY));
assertThat(request.requests().get(2).types().length, equalTo(0));
assertThat(request.requests().get(3).indices(), is(Strings.EMPTY_ARRAY));
assertThat(request.requests().get(3).types().length, equalTo(0));
assertThat(request.requests().get(3).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH));
assertThat(request.requests().get(4).indices(), is(Strings.EMPTY_ARRAY));
assertThat(request.requests().get(4).types().length, equalTo(0));
}
public void testSimpleAdd3() throws Exception {
@ -223,13 +211,9 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
assertThat(request.requests().get(0).indices()[1], equalTo("test1"));
assertThat(request.requests().get(1).indices()[0], equalTo("test2"));
assertThat(request.requests().get(1).indices()[1], equalTo("test3"));
assertThat(request.requests().get(1).types()[0], equalTo("type1"));
assertThat(request.requests().get(2).indices()[0], equalTo("test4"));
assertThat(request.requests().get(2).indices()[1], equalTo("test1"));
assertThat(request.requests().get(2).types()[0], equalTo("type2"));
assertThat(request.requests().get(2).types()[1], equalTo("type1"));
assertThat(request.requests().get(3).indices(), is(Strings.EMPTY_ARRAY));
assertThat(request.requests().get(3).types().length, equalTo(0));
assertThat(request.requests().get(3).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH));
}
@ -242,13 +226,10 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
assertThat(request.requests().get(0).preference(), nullValue());
assertThat(request.requests().get(1).indices()[0], equalTo("test2"));
assertThat(request.requests().get(1).indices()[1], equalTo("test3"));
assertThat(request.requests().get(1).types()[0], equalTo("type1"));
assertThat(request.requests().get(1).requestCache(), nullValue());
assertThat(request.requests().get(1).preference(), equalTo("_local"));
assertThat(request.requests().get(2).indices()[0], equalTo("test4"));
assertThat(request.requests().get(2).indices()[1], equalTo("test1"));
assertThat(request.requests().get(2).types()[0], equalTo("type2"));
assertThat(request.requests().get(2).types()[1], equalTo("type1"));
assertThat(request.requests().get(2).routing(), equalTo("123"));
}
@ -419,7 +400,6 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
null,
null,
null,
null,
xContentRegistry(),
true,
deprecationLogger

View File

@ -127,7 +127,6 @@ public class SearchRequestTests extends AbstractSearchTestCase {
SearchRequest searchRequest = new SearchRequest();
assertNotNull(searchRequest.indices());
assertNotNull(searchRequest.indicesOptions());
assertNotNull(searchRequest.types());
assertNotNull(searchRequest.searchType());
NullPointerException e = expectThrows(NullPointerException.class, () -> searchRequest.indices((String[]) null));
@ -138,11 +137,6 @@ public class SearchRequestTests extends AbstractSearchTestCase {
e = expectThrows(NullPointerException.class, () -> searchRequest.indicesOptions(null));
assertEquals("indicesOptions must not be null", e.getMessage());
e = expectThrows(NullPointerException.class, () -> searchRequest.types((String[]) null));
assertEquals("types must not be null", e.getMessage());
e = expectThrows(NullPointerException.class, () -> searchRequest.types((String) null));
assertEquals("type must not be null", e.getMessage());
e = expectThrows(NullPointerException.class, () -> searchRequest.searchType((SearchType) null));
assertEquals("searchType must not be null", e.getMessage());
@ -242,7 +236,6 @@ public class SearchRequestTests extends AbstractSearchTestCase {
)
)
);
mutators.add(() -> mutation.types(ArrayUtils.concat(searchRequest.types(), new String[] { randomAlphaOfLength(10) })));
mutators.add(() -> mutation.preference(randomValueOtherThan(searchRequest.preference(), () -> randomAlphaOfLengthBetween(3, 10))));
mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAlphaOfLengthBetween(3, 10))));
mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), OpenSearchTestCase::randomBoolean))));
@ -273,13 +266,13 @@ public class SearchRequestTests extends AbstractSearchTestCase {
}
public void testDescriptionForDefault() {
assertThat(toDescription(new SearchRequest()), equalTo("indices[], types[], search_type[QUERY_THEN_FETCH], source[]"));
assertThat(toDescription(new SearchRequest()), equalTo("indices[], search_type[QUERY_THEN_FETCH], source[]"));
}
public void testDescriptionIncludesScroll() {
assertThat(
toDescription(new SearchRequest().scroll(TimeValue.timeValueMinutes(5))),
equalTo("indices[], types[], search_type[QUERY_THEN_FETCH], scroll[5m], source[]")
equalTo("indices[], search_type[QUERY_THEN_FETCH], scroll[5m], source[]")
);
}

View File

@ -96,7 +96,7 @@ public class SearchSlowLogTests extends OpenSearchSingleNodeTestCase {
protected SearchContext createSearchContext(IndexService indexService, String... groupStats) {
BigArrays bigArrays = indexService.getBigArrays();
final ShardSearchRequest request = new ShardSearchRequest(new ShardId(indexService.index(), 0), new String[0], 0L, null);
final ShardSearchRequest request = new ShardSearchRequest(new ShardId(indexService.index(), 0), 0L, null);
return new TestSearchContext(bigArrays, indexService) {
@Override
public List<String> groupStats() {
@ -258,30 +258,12 @@ public class SearchSlowLogTests extends OpenSearchSingleNodeTestCase {
assertThat(p.getValueFor("took"), equalTo("10nanos"));
assertThat(p.getValueFor("took_millis"), equalTo("0"));
assertThat(p.getValueFor("total_hits"), equalTo("-1"));
assertThat(p.getValueFor("types"), equalTo("[]"));
assertThat(p.getValueFor("stats"), equalTo("[]"));
assertThat(p.getValueFor("search_type"), Matchers.nullValue());
assertThat(p.getValueFor("total_shards"), equalTo("1"));
assertThat(p.getValueFor("source"), equalTo("{\\\"query\\\":{\\\"match_all\\\":{\\\"boost\\\":1.0}}}"));
}
public void testSlowLogWithTypes() throws IOException {
IndexService index = createIndex("foo");
SearchContext searchContext = searchContextWithSourceAndTask(index);
searchContext.getQueryShardContext().setTypes("type1", "type2");
SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\", \\\"type2\\\"]"));
searchContext.getQueryShardContext().setTypes("type1");
p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\"]"));
searchContext.getQueryShardContext().setTypes();
p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
assertThat(p.getValueFor("types"), equalTo("[]"));
}
public void testSlowLogsWithStats() throws IOException {
IndexService index = createIndex("foo");
SearchContext searchContext = createSearchContext(index, "group1");

View File

@ -143,7 +143,7 @@ public class IndexFieldDataServiceTests extends OpenSearchSingleNodeTestCase {
searchLookupSetOnce.set(searchLookup);
return (IndexFieldData.Builder) (cache, breakerService) -> null;
});
SearchLookup searchLookup = new SearchLookup(null, null, null);
SearchLookup searchLookup = new SearchLookup(null, null);
ifdService.getForField(ft, "qualified", () -> searchLookup);
assertSame(searchLookup, searchLookupSetOnce.get().get());
}

View File

@ -42,9 +42,6 @@ import org.opensearch.index.query.QueryShardContext;
import org.opensearch.test.OpenSearchTestCase;
import org.mockito.Mockito;
import java.util.Collection;
import java.util.Collections;
public class IdFieldTypeTests extends OpenSearchTestCase {
public void testRangeQuery() {
@ -70,16 +67,12 @@ public class IdFieldTypeTests extends OpenSearchTestCase {
Mockito.when(context.indexVersionCreated()).thenReturn(indexSettings.getAsVersion(IndexMetadata.SETTING_VERSION_CREATED, null));
MapperService mapperService = Mockito.mock(MapperService.class);
Collection<String> types = Collections.emptySet();
Mockito.when(context.queryTypes()).thenReturn(types);
Mockito.when(context.getMapperService()).thenReturn(mapperService);
MappedFieldType ft = new IdFieldMapper.IdFieldType(() -> false);
Query query = ft.termQuery("id", context);
assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query);
types = Collections.singleton("type");
Mockito.when(context.queryTypes()).thenReturn(types);
query = ft.termQuery("id", context);
assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query);
}

View File

@ -45,7 +45,7 @@ public class CommonTermsQueryParserTests extends OpenSearchSingleNodeTestCase {
CommonTermsQueryBuilder commonTermsQueryBuilder = new CommonTermsQueryBuilder("name", "the").queryName("query-name");
// the named query parses to null; we are testing this does not cause a NullPointerException
SearchResponse response = client().prepareSearch(index).setTypes(type).setQuery(commonTermsQueryBuilder).execute().actionGet();
SearchResponse response = client().prepareSearch(index).setQuery(commonTermsQueryBuilder).execute().actionGet();
assertNotNull(response);
assertEquals(response.getHits().getHits().length, 0);

View File

@ -35,14 +35,11 @@ package org.opensearch.index.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.opensearch.cluster.metadata.Metadata;
import org.opensearch.common.ParsingException;
import org.opensearch.common.xcontent.XContentParser;
import org.opensearch.index.mapper.IdFieldMapper;
import org.opensearch.test.AbstractQueryTestCase;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
@ -56,43 +53,17 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
@Override
protected IdsQueryBuilder doCreateTestQueryBuilder() {
final String type;
if (randomBoolean()) {
if (frequently()) {
type = "_doc";
} else {
type = randomAlphaOfLengthBetween(1, 10);
}
} else if (randomBoolean()) {
type = Metadata.ALL;
} else {
type = null;
}
int numberOfIds = randomIntBetween(0, 10);
String[] ids = new String[numberOfIds];
for (int i = 0; i < numberOfIds; i++) {
ids[i] = randomAlphaOfLengthBetween(1, 10);
}
IdsQueryBuilder query;
if (type != null && randomBoolean()) {
query = new IdsQueryBuilder().types(type);
query.addIds(ids);
} else {
query = new IdsQueryBuilder();
query.addIds(ids);
}
return query;
return new IdsQueryBuilder().addIds(ids);
}
@Override
protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
boolean allTypes = queryBuilder.types().length == 0 || queryBuilder.types().length == 1 && "_all".equals(queryBuilder.types()[0]);
if (queryBuilder.ids().size() == 0
// no types
|| context.fieldMapper(IdFieldMapper.NAME) == null
// there are types, but disjoint from the query
|| (allTypes == false
&& Arrays.asList(queryBuilder.types()).indexOf(context.getMapperService().documentMapper().type()) == -1)) {
if (queryBuilder.ids().size() == 0) {
assertThat(query, instanceOf(MatchNoDocsQuery.class));
} else {
assertThat(query, instanceOf(TermInSetQuery.class));
@ -100,11 +71,8 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
}
public void testIllegalArguments() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IdsQueryBuilder().types((String[]) null));
assertEquals("[ids] types cannot be null", e.getMessage());
IdsQueryBuilder idsQueryBuilder = new IdsQueryBuilder();
e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[]) null));
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[]) null));
assertEquals("[ids] ids cannot be null", e.getMessage());
}
@ -116,64 +84,27 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
}
public void testFromJson() throws IOException {
String json = "{\n"
+ " \"ids\" : {\n"
+ " \"type\" : [ \"my_type\" ],\n"
+ " \"values\" : [ \"1\", \"100\", \"4\" ],\n"
+ " \"boost\" : 1.0\n"
+ " }\n"
+ "}";
String json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}";
IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertThat(parsed.ids(), contains("1", "100", "4"));
assertEquals(json, "my_type", parsed.types()[0]);
// check that type that is not an array and also ids that are numbers are parsed
json = "{\n"
+ " \"ids\" : {\n"
+ " \"type\" : \"my_type\",\n"
+ " \"values\" : [ 1, 100, 4 ],\n"
+ " \"boost\" : 1.0\n"
+ " }\n"
+ "}";
json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ 1, 100, 4 ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}";
parsed = (IdsQueryBuilder) parseQuery(json);
assertThat(parsed.ids(), contains("1", "100", "4"));
assertEquals(json, "my_type", parsed.types()[0]);
// check with empty type array
json = "{\n"
+ " \"ids\" : {\n"
+ " \"type\" : [ ],\n"
+ " \"values\" : [ \"1\", \"100\", \"4\" ],\n"
+ " \"boost\" : 1.0\n"
+ " }\n"
+ "}";
parsed = (IdsQueryBuilder) parseQuery(json);
assertThat(parsed.ids(), contains("1", "100", "4"));
assertEquals(json, 0, parsed.types().length);
// check without type
json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}";
parsed = (IdsQueryBuilder) parseQuery(json);
assertThat(parsed.ids(), contains("1", "100", "4"));
assertEquals(json, 0, parsed.types().length);
}
@Override
protected QueryBuilder parseQuery(XContentParser parser) throws IOException {
QueryBuilder query = super.parseQuery(parser);
assertThat(query, instanceOf(IdsQueryBuilder.class));
IdsQueryBuilder idsQuery = (IdsQueryBuilder) query;
if (idsQuery.types().length > 0 && !assertedWarnings.contains(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE)) {
assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
assertedWarnings.add(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
}
return query;
}
@Override
public void testMustRewrite() throws IOException {
super.testMustRewrite();
QueryShardContext context = createShardContextWithNoType();
context.setAllowUnmappedFields(true);
IdsQueryBuilder queryBuilder = createTestQueryBuilder();

View File

@ -34,7 +34,6 @@ package org.opensearch.index.query;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import com.carrotsearch.randomizedtesting.generators.RandomStrings;
import org.opensearch.common.Strings;
import java.util.Random;
@ -63,7 +62,7 @@ public class RandomQueryBuilder {
case 2:
// We make sure this query has no types to avoid deprecation warnings in the
// tests that use this method.
return new IdsQueryBuilderTests().createTestQueryBuilder().types(Strings.EMPTY_ARRAY);
return new IdsQueryBuilderTests().createTestQueryBuilder();
case 3:
return createMultiTermQuery(r);
default:

View File

@ -95,30 +95,6 @@ public class DeleteByQueryRequestTests extends AbstractBulkByScrollRequestTestCa
// No extra assertions needed
}
public void testTypesGetter() {
int numTypes = between(1, 50);
String[] types = new String[numTypes];
for (int i = 0; i < numTypes; i++) {
types[i] = randomSimpleString(random(), 1, 30);
}
SearchRequest searchRequest = new SearchRequest();
searchRequest.types(types);
DeleteByQueryRequest request = new DeleteByQueryRequest(searchRequest);
assertArrayEquals(request.types(), types);
}
public void testTypesSetter() {
int numTypes = between(1, 50);
String[] types = new String[numTypes];
for (int i = 0; i < numTypes; i++) {
types[i] = randomSimpleString(random(), 1, 30);
}
SearchRequest searchRequest = new SearchRequest();
DeleteByQueryRequest request = new DeleteByQueryRequest(searchRequest);
request.types(types);
assertArrayEquals(request.types(), types);
}
public void testValidateGivenNoQuery() {
SearchRequest searchRequest = new SearchRequest();
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(searchRequest);

View File

@ -1,103 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.rest.action.admin.indices;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.concurrent.ThreadContext;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestStatus;
import org.opensearch.test.rest.FakeRestChannel;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.util.HashMap;
import java.util.Map;
import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
public class RestGetFieldMappingActionTests extends RestActionTestCase {
@Before
public void setUpAction() {
controller().registerHandler(new RestGetFieldMappingAction());
}
public void testIncludeTypeName() {
Map<String, String> params = new HashMap<>();
String path;
if (randomBoolean()) {
params.put(INCLUDE_TYPE_NAME_PARAMETER, "true");
path = "some_index/some_type/_mapping/field/some_field";
} else {
params.put(INCLUDE_TYPE_NAME_PARAMETER, "false");
path = "some_index/_mapping/field/some_field";
}
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath(path)
.withParams(params)
.build();
dispatchRequest(deprecatedRequest);
assertWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE);
RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath("some_index/_mapping/field/some_field")
.build();
dispatchRequest(validRequest);
}
public void testTypeInPath() {
// Test that specifying a type while setting include_type_name to false
// results in an illegal argument exception.
Map<String, String> params = new HashMap<>();
params.put(INCLUDE_TYPE_NAME_PARAMETER, "false");
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath("some_index/some_type/_mapping/field/some_field")
.withParams(params)
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
FakeRestChannel channel = new FakeRestChannel(request, false, 1);
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
controller().dispatchRequest(request, channel, threadContext);
assertEquals(1, channel.errors().get());
assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status());
}
}
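For contrast with the deleted test above (illustrative only, reusing the same test utilities): the typeless field-mapping endpoint is the only remaining form.
// Sketch: the surviving, typeless request path; no type segment and no include_type_name parameter.
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
    .withPath("some_index/_mapping/field/some_field")
    .build();
dispatchRequest(request);   // expected to dispatch without a types deprecation warning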

View File

@ -1,124 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.rest.action.admin.indices;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.settings.Settings;
import org.opensearch.common.util.concurrent.ThreadContext;
import org.opensearch.rest.RestRequest;
import org.opensearch.rest.RestStatus;
import org.opensearch.test.rest.FakeRestChannel;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.opensearch.threadpool.TestThreadPool;
import org.opensearch.threadpool.ThreadPool;
import org.junit.After;
import org.junit.Before;
import java.util.HashMap;
import java.util.Map;
import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
import static org.mockito.Mockito.mock;
public class RestGetMappingActionTests extends RestActionTestCase {
private ThreadPool threadPool;
@Before
public void setUpAction() {
threadPool = new TestThreadPool(RestValidateQueryActionTests.class.getName());
controller().registerHandler(new RestGetMappingAction(threadPool));
}
@After
public void tearDownAction() {
assertTrue(terminate(threadPool));
}
public void testTypeExistsDeprecation() throws Exception {
Map<String, String> params = new HashMap<>();
params.put("type", "_doc");
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.HEAD)
.withParams(params)
.build();
RestGetMappingAction handler = new RestGetMappingAction(threadPool);
handler.prepareRequest(request, mock(NodeClient.class));
assertWarnings("Type exists requests are deprecated, as types have been deprecated.");
}
public void testTypeInPath() {
// Test that specifying a type while setting include_type_name to false
// results in an illegal argument exception.
Map<String, String> params = new HashMap<>();
params.put(INCLUDE_TYPE_NAME_PARAMETER, "false");
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withPath("some_index/some_type/_mapping/some_field")
.withParams(params)
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
FakeRestChannel channel = new FakeRestChannel(request, false, 1);
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
controller().dispatchRequest(request, channel, threadContext);
assertEquals(1, channel.errors().get());
assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status());
}
/**
* Setting "include_type_name" to true or false should cause a deprecation warning starting in 7.0
*/
public void testTypeUrlParameterDeprecation() throws Exception {
Map<String, String> params = new HashMap<>();
params.put(INCLUDE_TYPE_NAME_PARAMETER, Boolean.toString(randomBoolean()));
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
.withParams(params)
.withPath("/some_index/_mappings")
.build();
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
FakeRestChannel channel = new FakeRestChannel(request, false, 1);
ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
controller().dispatchRequest(request, channel, threadContext);
assertWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE);
}
}
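Likewise for index mappings, an illustrative sketch under the same assumptions as the deleted test above:
// Sketch: typeless get-mapping request; no type in the path and no include_type_name parameter.
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
    .withPath("/some_index/_mappings")
    .build();
dispatchRequest(request);   // no deprecation warning expected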

View File

@ -1,92 +0,0 @@
/*
* SPDX-License-Identifier: Apache-2.0
*
* The OpenSearch Contributors require contributions made to
* this file be licensed under the Apache-2.0 license or a
* compatible open source license.
*/
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Modifications Copyright OpenSearch Contributors. See
* GitHub history for details.
*/
package org.opensearch.rest.action.admin.indices;
import org.opensearch.client.node.NodeClient;
import org.opensearch.common.bytes.BytesReference;
import org.opensearch.common.xcontent.XContentBuilder;
import org.opensearch.common.xcontent.XContentFactory;
import org.opensearch.common.xcontent.XContentType;
import org.opensearch.rest.RestRequest;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
import static org.mockito.Mockito.mock;
public class RestPutIndexTemplateActionTests extends RestActionTestCase {
private RestPutIndexTemplateAction action;
@Before
public void setUpAction() {
action = new RestPutIndexTemplateAction();
controller().registerHandler(action);
}
public void testIncludeTypeName() throws IOException {
XContentBuilder typedContent = XContentFactory.jsonBuilder()
.startObject()
.startObject("mappings")
.startObject("my_doc")
.startObject("properties")
.startObject("field1")
.field("type", "keyword")
.endObject()
.startObject("field2")
.field("type", "text")
.endObject()
.endObject()
.endObject()
.endObject()
.startObject("aliases")
.startObject("read_alias")
.endObject()
.endObject()
.endObject();
Map<String, String> params = new HashMap<>();
params.put(INCLUDE_TYPE_NAME_PARAMETER, "true");
RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT)
.withParams(params)
.withPath("/_template/_some_template")
.withContent(BytesReference.bytes(typedContent), XContentType.JSON)
.build();
action.prepareRequest(request, mock(NodeClient.class));
assertWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE);
}
}
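For comparison with the deleted test (an editorial sketch, not part of the commit): without include_type_name, the template mappings lose the "my_doc" type wrapper and put "properties" directly under "mappings". Using the same XContentBuilder style as above:
// Sketch: the typeless equivalent of the template body built in the removed test.
XContentBuilder typelessContent = XContentFactory.jsonBuilder()
    .startObject()
    .startObject("mappings")
    .startObject("properties")                       // no "my_doc" type level
    .startObject("field1").field("type", "keyword").endObject()
    .startObject("field2").field("type", "text").endObject()
    .endObject()
    .endObject()
    .startObject("aliases")
    .startObject("read_alias").endObject()
    .endObject()
    .endObject();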

Some files were not shown because too many files have changed in this diff.