[Remove] Type from Search Internals (#2109)
With the deprecation of mapping types, type support is removed from the internal search API (SearchRequest and QueryShardContext).

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
parent 32a761df2c
commit 7fe642fda5
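Before the diff itself, a minimal illustrative sketch of how calling code migrates once types are gone from the internal search API. This snippet is not part of the commit; the class name TypesRemovalSketch and the index name "my-index" are placeholders, and it only relies on the SearchRequest and DeleteByQueryRequest constructors that the hunks below already use.

    import org.opensearch.action.search.SearchRequest;
    import org.opensearch.index.reindex.DeleteByQueryRequest;

    public class TypesRemovalSketch {
        public static void main(String[] args) {
            // Previously a caller could still scope a search to mapping types:
            //     new SearchRequest("my-index").types("my-type");
            // After this change the request is built from indices alone, so the
            // generated endpoint is /my-index/_search rather than /my-index/my-type/_search.
            SearchRequest searchRequest = new SearchRequest("my-index");

            // The same holds for the by-query requests converted in RequestConverters:
            // doc types no longer contribute a path segment, giving /my-index/_delete_by_query.
            DeleteByQueryRequest deleteByQuery = new DeleteByQueryRequest("my-index");

            System.out.println(searchRequest.indices()[0] + " / " + deleteByQuery.indices()[0]);
        }
    }

Dropping the type path segment is also what allows the type-accepting endpoint(...) overloads in RequestConverters to be marked @Deprecated in the hunks below.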
@@ -432,7 +432,7 @@ final class RequestConverters {
     * for standard searches
     */
    static Request search(SearchRequest searchRequest, String searchEndpoint) throws IOException {
-       Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchRequest.types(), searchEndpoint));
+       Request request = new Request(HttpPost.METHOD_NAME, endpoint(searchRequest.indices(), searchEndpoint));

        Params params = new Params();
        addSearchRequestParams(params, searchRequest);
@@ -502,7 +502,7 @@ final class RequestConverters {
            request = new Request(HttpGet.METHOD_NAME, "_render/template");
        } else {
            SearchRequest searchRequest = searchTemplateRequest.getRequest();
-           String endpoint = endpoint(searchRequest.indices(), searchRequest.types(), "_search/template");
+           String endpoint = endpoint(searchRequest.indices(), "_search/template");
            request = new Request(HttpGet.METHOD_NAME, endpoint);

            Params params = new Params();
@@ -633,7 +633,7 @@ final class RequestConverters {

    private static Request prepareDeleteByQueryRequest(DeleteByQueryRequest deleteByQueryRequest, boolean waitForCompletion)
        throws IOException {
-       String endpoint = endpoint(deleteByQueryRequest.indices(), deleteByQueryRequest.getDocTypes(), "_delete_by_query");
+       String endpoint = endpoint(deleteByQueryRequest.indices(), "_delete_by_query");
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params params = new Params().withRouting(deleteByQueryRequest.getRouting())
            .withRefresh(deleteByQueryRequest.isRefresh())
@@ -661,7 +661,7 @@ final class RequestConverters {
    }

    static Request prepareUpdateByQueryRequest(UpdateByQueryRequest updateByQueryRequest, boolean waitForCompletion) throws IOException {
-       String endpoint = endpoint(updateByQueryRequest.indices(), updateByQueryRequest.getDocTypes(), "_update_by_query");
+       String endpoint = endpoint(updateByQueryRequest.indices(), "_update_by_query");
        Request request = new Request(HttpPost.METHOD_NAME, endpoint);
        Params params = new Params().withRouting(updateByQueryRequest.getRouting())
            .withPipeline(updateByQueryRequest.getPipeline())
@@ -799,10 +799,12 @@ final class RequestConverters {
        return new NByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType));
    }

+   @Deprecated
    static String endpoint(String index, String type, String id) {
        return new EndpointBuilder().addPathPart(index, type, id).build();
    }

+   @Deprecated
    static String endpoint(String index, String type, String id, String endpoint) {
        return new EndpointBuilder().addPathPart(index, type, id).addPathPartAsIs(endpoint).build();
    }
@@ -815,6 +817,7 @@ final class RequestConverters {
        return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).build();
    }

+   @Deprecated
    static String endpoint(String[] indices, String[] types, String endpoint) {
        return new EndpointBuilder().addCommaSeparatedPathParts(indices)
            .addCommaSeparatedPathParts(types)
@@ -829,6 +832,7 @@ final class RequestConverters {
            .build();
    }

+   @Deprecated
    static String endpoint(String[] indices, String endpoint, String type) {
        return new EndpointBuilder().addCommaSeparatedPathParts(indices).addPathPartAsIs(endpoint).addPathPart(type).build();
    }
@@ -468,9 +468,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
            );
            reindexRequest.setRemoteInfo(remoteInfo);
        }
-       if (randomBoolean()) {
-           reindexRequest.setSourceDocTypes("doc", "tweet");
-       }
        if (randomBoolean()) {
            reindexRequest.setSourceBatchSize(randomInt(100));
        }
@@ -536,9 +533,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
        UpdateByQueryRequest updateByQueryRequest = new UpdateByQueryRequest();
        updateByQueryRequest.indices(randomIndicesNames(1, 5));
        Map<String, String> expectedParams = new HashMap<>();
-       if (randomBoolean()) {
-           updateByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false));
-       }
        if (randomBoolean()) {
            int batchSize = randomInt(100);
            updateByQueryRequest.setBatchSize(batchSize);
@@ -600,9 +594,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
        Request request = RequestConverters.updateByQuery(updateByQueryRequest);
        StringJoiner joiner = new StringJoiner("/", "/", "");
        joiner.add(String.join(",", updateByQueryRequest.indices()));
-       if (updateByQueryRequest.getDocTypes().length > 0) {
-           joiner.add(String.join(",", updateByQueryRequest.getDocTypes()));
-       }
        joiner.add("_update_by_query");
        assertEquals(joiner.toString(), request.getEndpoint());
        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
@@ -614,9 +605,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
        DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest();
        deleteByQueryRequest.indices(randomIndicesNames(1, 5));
        Map<String, String> expectedParams = new HashMap<>();
-       if (randomBoolean()) {
-           deleteByQueryRequest.setDocTypes(generateRandomStringArray(5, 5, false, false));
-       }
        if (randomBoolean()) {
            int batchSize = randomInt(100);
            deleteByQueryRequest.setBatchSize(batchSize);
@@ -671,9 +659,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
        Request request = RequestConverters.deleteByQuery(deleteByQueryRequest);
        StringJoiner joiner = new StringJoiner("/", "/", "");
        joiner.add(String.join(",", deleteByQueryRequest.indices()));
-       if (deleteByQueryRequest.getDocTypes().length > 0) {
-           joiner.add(String.join(",", deleteByQueryRequest.getDocTypes()));
-       }
        joiner.add("_delete_by_query");
        assertEquals(joiner.toString(), request.getEndpoint());
        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
@@ -1191,10 +1176,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
        if (Strings.hasLength(index)) {
            endpoint.add(index);
        }
-       String type = String.join(",", searchRequest.types());
-       if (Strings.hasLength(type)) {
-           endpoint.add(type);
-       }
        endpoint.add(searchEndpoint);
        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
        assertEquals(endpoint.toString(), request.getEndpoint());
@@ -1204,14 +1185,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {

    public static SearchRequest createTestSearchRequest(String[] indices, Map<String, String> expectedParams) {
        SearchRequest searchRequest = new SearchRequest(indices);
-
-       int numTypes = randomIntBetween(0, 5);
-       String[] types = new String[numTypes];
-       for (int i = 0; i < numTypes; i++) {
-           types[i] = "type-" + randomAlphaOfLengthBetween(2, 5);
-       }
-       searchRequest.types(types);
-
        setRandomSearchParams(searchRequest, expectedParams);
        setRandomIndicesOptions(searchRequest::indicesOptions, searchRequest::indicesOptions, expectedParams);

@@ -1278,7 +1251,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
    public void testSearchNullIndicesAndTypes() {
        expectThrows(NullPointerException.class, () -> new SearchRequest((String[]) null));
        expectThrows(NullPointerException.class, () -> new SearchRequest().indices((String[]) null));
-       expectThrows(NullPointerException.class, () -> new SearchRequest().types((String[]) null));
    }

    public void testCountNotNullSource() throws IOException {
@@ -1293,14 +1265,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
    public void testCount() throws Exception {
        String[] indices = randomIndicesNames(0, 5);
        CountRequest countRequest = new CountRequest(indices);
-
-       int numTypes = randomIntBetween(0, 5);
-       String[] types = new String[numTypes];
-       for (int i = 0; i < numTypes; i++) {
-           types[i] = "type-" + randomAlphaOfLengthBetween(2, 5);
-       }
-       countRequest.types(types);
-
        Map<String, String> expectedParams = new HashMap<>();
        setRandomCountParams(countRequest, expectedParams);
        setRandomIndicesOptions(countRequest::indicesOptions, countRequest::indicesOptions, expectedParams);
@@ -1317,10 +1281,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
        if (Strings.hasLength(index)) {
            endpoint.add(index);
        }
-       String type = String.join(",", types);
-       if (Strings.hasLength(type)) {
-           endpoint.add(type);
-       }
        endpoint.add("_count");
        assertEquals(HttpPost.METHOD_NAME, request.getMethod());
        assertEquals(endpoint.toString(), request.getEndpoint());
@@ -1328,12 +1288,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
        assertToXContentBody(countRequest, request.getEntity());
    }

-   public void testCountNullIndicesAndTypes() {
-       expectThrows(NullPointerException.class, () -> new CountRequest((String[]) null));
-       expectThrows(NullPointerException.class, () -> new CountRequest().indices((String[]) null));
-       expectThrows(NullPointerException.class, () -> new CountRequest().types((String[]) null));
-   }
-
    private static void setRandomCountParams(CountRequest countRequest, Map<String, String> expectedParams) {
        if (randomBoolean()) {
            countRequest.routing(randomAlphaOfLengthBetween(3, 10));
@@ -1416,7 +1370,6 @@ public class RequestConvertersTests extends OpenSearchTestCase {
            null,
            null,
            null,
-           null,
            xContentRegistry(),
            true,
            deprecationLogger
@@ -88,7 +88,6 @@ import static org.opensearch.index.query.QueryBuilders.spanTermQuery;
import static org.opensearch.index.query.QueryBuilders.spanWithinQuery;
import static org.opensearch.index.query.QueryBuilders.termQuery;
import static org.opensearch.index.query.QueryBuilders.termsQuery;
-import static org.opensearch.index.query.QueryBuilders.typeQuery;
import static org.opensearch.index.query.QueryBuilders.wildcardQuery;
import static org.opensearch.index.query.QueryBuilders.wrapperQuery;
import static org.opensearch.index.query.functionscore.ScoreFunctionBuilders.exponentialDecayFunction;
@@ -447,12 +446,6 @@ public class QueryDSLDocumentationTests extends OpenSearchTestCase {
        // end::terms
    }

-   public void testType() {
-       // tag::type
-       typeQuery("my_type"); // <1>
-       // end::type
-   }
-
    public void testWildcard() {
        // tag::wildcard
        wildcardQuery(
@@ -140,7 +140,6 @@ public class HighlighterWithAnalyzersTests extends OpenSearchIntegTestCase {
        client().prepareIndex("test", "test", "1").setSource("name", "ARCOTEL Hotels Deutschland").get();
        refresh();
        SearchResponse search = client().prepareSearch("test")
-           .setTypes("test")
            .setQuery(matchQuery("name.autocomplete", "deut tel").operator(Operator.OR))
            .highlighter(new HighlightBuilder().field("name.autocomplete"))
            .get();
@@ -85,7 +85,6 @@ public class StoredExpressionIT extends OpenSearchIntegTestCase {
                    new SearchSourceBuilder().scriptField("test1", new Script(ScriptType.STORED, null, "script1", Collections.emptyMap()))
                )
                .setIndices("test")
-               .setTypes("scriptTest")
                .get();
            fail("search script should have been rejected");
        } catch (Exception e) {
@@ -77,7 +77,7 @@ public class ExpressionFieldScriptTests extends OpenSearchTestCase {
        when(fieldData.load(any())).thenReturn(atomicFieldData);

        service = new ExpressionScriptEngine();
-       lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
+       lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
    }

    private FieldScript.LeafFactory compile(String expression) {
@@ -76,7 +76,7 @@ public class ExpressionNumberSortScriptTests extends OpenSearchTestCase {
        when(fieldData.load(any())).thenReturn(atomicFieldData);

        service = new ExpressionScriptEngine();
-       lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
+       lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
    }

    private NumberSortScript.LeafFactory compile(String expression) {
@@ -76,7 +76,7 @@ public class ExpressionTermsSetQueryTests extends OpenSearchTestCase {
        when(fieldData.load(any())).thenReturn(atomicFieldData);

        service = new ExpressionScriptEngine();
-       lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData, null);
+       lookup = new SearchLookup(mapperService, (ignored, lookup) -> fieldData);
    }

    private TermsSetQueryScript.LeafFactory compile(String expression) {
@@ -196,9 +196,11 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
        Map<String, Object> templateParams = new HashMap<>();
        templateParams.put("fieldParam", "foo");

-       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
-           new SearchRequest("test").types("type")
-       ).setScript("testTemplate").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
+       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
+           .setScript("testTemplate")
+           .setScriptType(ScriptType.STORED)
+           .setScriptParams(templateParams)
+           .get();
        assertHitCount(searchResponse.getResponse(), 4);

        assertAcked(client().admin().cluster().prepareDeleteStoredScript("testTemplate"));
@@ -238,14 +240,16 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
        Map<String, Object> templateParams = new HashMap<>();
        templateParams.put("fieldParam", "foo");

-       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
-           new SearchRequest().indices("test").types("type")
-       ).setScript("1a").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
+       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test"))
+           .setScript("1a")
+           .setScriptType(ScriptType.STORED)
+           .setScriptParams(templateParams)
+           .get();
        assertHitCount(searchResponse.getResponse(), 4);

        expectThrows(
            ResourceNotFoundException.class,
-           () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test").types("type"))
+           () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test"))
                .setScript("1000")
                .setScriptType(ScriptType.STORED)
                .setScriptParams(templateParams)
@@ -253,7 +257,7 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
        );

        templateParams.put("fieldParam", "bar");
-       searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test").types("type"))
+       searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
            .setScript("2")
            .setScriptType(ScriptType.STORED)
            .setScriptParams(templateParams)
@@ -304,7 +308,7 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {

        IllegalArgumentException e = expectThrows(
            IllegalArgumentException.class,
-           () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex").types("test"))
+           () -> new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex"))
                .setScript("git01")
                .setScriptType(ScriptType.STORED)
                .setScriptParams(templateParams)
@@ -320,9 +324,11 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
                .setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(0))), XContentType.JSON)
        );

-       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
-           new SearchRequest("testindex").types("test")
-       ).setScript("git01").setScriptType(ScriptType.STORED).setScriptParams(templateParams).get();
+       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex"))
+           .setScript("git01")
+           .setScriptType(ScriptType.STORED)
+           .setScriptParams(templateParams)
+           .get();
        assertHitCount(searchResponse.getResponse(), 1);
    }
}
@@ -360,9 +366,11 @@ public class SearchTemplateIT extends OpenSearchSingleNodeTestCase {
        String[] fieldParams = { "foo", "bar" };
        arrayTemplateParams.put("fieldParam", fieldParams);

-       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(
-           new SearchRequest("test").types("type")
-       ).setScript("4").setScriptType(ScriptType.STORED).setScriptParams(arrayTemplateParams).get();
+       SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test"))
+           .setScript("4")
+           .setScriptType(ScriptType.STORED)
+           .setScriptParams(arrayTemplateParams)
+           .get();
        assertHitCount(searchResponse.getResponse(), 5);
    }

@@ -33,7 +33,6 @@
package org.opensearch.script.mustache;

import org.opensearch.client.node.NodeClient;
-import org.opensearch.common.logging.DeprecationLogger;
import org.opensearch.common.settings.Settings;
import org.opensearch.rest.BaseRestHandler;
import org.opensearch.rest.RestRequest;
@@ -53,9 +52,6 @@ import static org.opensearch.rest.RestRequest.Method.GET;
import static org.opensearch.rest.RestRequest.Method.POST;

public class RestMultiSearchTemplateAction extends BaseRestHandler {
-   private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchTemplateAction.class);
-   static final String TYPES_DEPRECATION_MESSAGE = "[types removal]"
-       + " Specifying types in multi search template requests is deprecated.";

    private static final Set<String> RESPONSE_PARAMS;

@@ -95,14 +91,6 @@ public class RestMultiSearchTemplateAction extends BaseRestHandler {
    @Override
    public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
        MultiSearchTemplateRequest multiRequest = parseRequest(request, allowExplicitIndex);
-
-       // Emit a single deprecation message if any search template contains types.
-       for (SearchTemplateRequest searchTemplateRequest : multiRequest.requests()) {
-           if (searchTemplateRequest.getRequest().types().length > 0) {
-               deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE);
-               break;
-           }
-       }
        return channel -> client.execute(MultiSearchTemplateAction.INSTANCE, multiRequest, new RestToXContentListener<>(channel));
    }

@@ -69,13 +69,10 @@ public class MultiSearchTemplateRequestTests extends OpenSearchTestCase {
        assertThat(request.requests().get(0).getRequest().preference(), nullValue());
        assertThat(request.requests().get(1).getRequest().indices()[0], equalTo("test2"));
        assertThat(request.requests().get(1).getRequest().indices()[1], equalTo("test3"));
-       assertThat(request.requests().get(1).getRequest().types()[0], equalTo("type1"));
        assertThat(request.requests().get(1).getRequest().requestCache(), nullValue());
        assertThat(request.requests().get(1).getRequest().preference(), equalTo("_local"));
        assertThat(request.requests().get(2).getRequest().indices()[0], equalTo("test4"));
        assertThat(request.requests().get(2).getRequest().indices()[1], equalTo("test1"));
-       assertThat(request.requests().get(2).getRequest().types()[0], equalTo("type2"));
-       assertThat(request.requests().get(2).getRequest().types()[1], equalTo("type1"));
        assertThat(request.requests().get(2).getRequest().routing(), equalTo("123"));
        assertNotNull(request.requests().get(0).getScript());
        assertNotNull(request.requests().get(1).getScript());
@@ -1,79 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.script.mustache;
-
-import org.opensearch.common.bytes.BytesArray;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.test.rest.FakeRestRequest;
-import org.opensearch.test.rest.RestActionTestCase;
-import org.junit.Before;
-
-import java.nio.charset.StandardCharsets;
-
-public class RestMultiSearchTemplateActionTests extends RestActionTestCase {
-
-   @Before
-   public void setUpAction() {
-       controller().registerHandler(new RestMultiSearchTemplateAction(Settings.EMPTY));
-   }
-
-   public void testTypeInPath() {
-       String content = "{ \"index\": \"some_index\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
-       BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
-
-       RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-           .withPath("/some_index/some_type/_msearch/template")
-           .withContent(bytesContent, XContentType.JSON)
-           .build();
-       // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-       verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
-       dispatchRequest(request);
-       assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE);
-   }
-
-   public void testTypeInBody() {
-       String content = "{ \"index\": \"some_index\", \"type\": \"some_type\" } \n" + "{\"source\": {\"query\" : {\"match_all\" :{}}}} \n";
-       BytesArray bytesContent = new BytesArray(content.getBytes(StandardCharsets.UTF_8));
-
-       RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withPath("/some_index/_msearch/template")
-           .withContent(bytesContent, XContentType.JSON)
-           .build();
-       // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-       verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
-       dispatchRequest(request);
-       assertWarnings(RestMultiSearchTemplateAction.TYPES_DEPRECATION_MESSAGE);
-   }
-}
@@ -1,71 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.script.mustache;
-
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.action.search.RestSearchAction;
-import org.opensearch.test.rest.FakeRestRequest;
-import org.opensearch.test.rest.RestActionTestCase;
-import org.junit.Before;
-
-import java.util.HashMap;
-import java.util.Map;
-
-public class RestSearchTemplateActionTests extends RestActionTestCase {
-
-   @Before
-   public void setUpAction() {
-       controller().registerHandler(new RestSearchTemplateAction());
-   }
-
-   public void testTypeInPath() {
-       RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-           .withPath("/some_index/some_type/_search/template")
-           .build();
-
-       dispatchRequest(request);
-       assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
-   }
-
-   public void testTypeParameter() {
-       Map<String, String> params = new HashMap<>();
-       params.put("type", "some_type");
-
-       RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-           .withPath("/some_index/_search/template")
-           .withParams(params)
-           .build();
-
-       dispatchRequest(request);
-       assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
-   }
-}
@@ -1,6 +1,6 @@
{"index":["test0", "test1"], "request_cache": true}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
-{"index" : "test2,test3", "type" : "type1", "preference": "_local"}
+{"index" : "test2,test3", "preference": "_local"}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
-{"index" : ["test4", "test1"], "type" : [ "type2", "type1" ], "routing": "123"}
+{"index" : ["test4", "test1"], "routing": "123"}
{"source": {"query" : {"match_{{template}}" :{}}}, "params": {"template": "all" } }
@@ -203,7 +203,7 @@ public class TokenCountFieldMapperIntegrationIT extends OpenSearchIntegTestCase
    }

    private SearchRequestBuilder prepareSearch() {
-       SearchRequestBuilder request = client().prepareSearch("test").setTypes("test");
+       SearchRequestBuilder request = client().prepareSearch("test");
        request.addStoredField("foo.token_count");
        request.addStoredField("foo.token_count_without_position_increments");
        if (loadCountedFields) {
@@ -201,7 +201,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {

        // TEST FETCHING _parent from child
        SearchResponse searchResponse;
-       searchResponse = client().prepareSearch("test").setQuery(idsQuery("doc").addIds("c1")).get();
+       searchResponse = client().prepareSearch("test").setQuery(idsQuery().addIds("c1")).get();
        assertNoFailures(searchResponse);
        assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
        assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
@@ -290,13 +290,9 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
    }

    public void testToQueryInnerQueryType() throws IOException {
-       String[] searchTypes = new String[] { TYPE };
        QueryShardContext shardContext = createShardContext();
-       shardContext.setTypes(searchTypes);
        HasChildQueryBuilder hasChildQueryBuilder = hasChildQuery(CHILD_DOC, new IdsQueryBuilder().addIds("id"), ScoreMode.None);
        Query query = hasChildQueryBuilder.toQuery(shardContext);
-       // verify that the context types are still the same as the ones we previously set
-       assertThat(shardContext.getTypes(), equalTo(searchTypes));
        assertLateParsingQuery(query, CHILD_DOC, "id");
    }

@@ -192,13 +192,9 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
    }

    public void testToQueryInnerQueryType() throws IOException {
-       String[] searchTypes = new String[] { TYPE };
        QueryShardContext shardContext = createShardContext();
-       shardContext.setTypes(searchTypes);
        HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_DOC, new IdsQueryBuilder().addIds("id"), false);
        Query query = hasParentQueryBuilder.toQuery(shardContext);
-       // verify that the context types are still the same as the ones we previously set
-       assertThat(shardContext.getTypes(), equalTo(searchTypes));
        HasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_DOC, "id");
    }

@@ -79,7 +79,6 @@ final class RemoteRequestBuilders {
        // It is nasty to build paths with StringBuilder but we'll be careful....
        StringBuilder path = new StringBuilder("/");
        addIndices(path, searchRequest.indices());
-       addTypes(path, searchRequest.types());
        path.append("_search");
        Request request = new Request("POST", path.toString());

@@ -210,16 +209,6 @@ final class RemoteRequestBuilders {
        }
    }

-   private static void addTypes(StringBuilder path, String[] types) {
-       if (types == null || types.length == 0) {
-           return;
-       }
-       for (String indexOrType : types) {
-           checkIndexOrType("Type", indexOrType);
-       }
-       path.append(Strings.arrayToCommaDelimitedString(types)).append('/');
-   }
-
    private static void checkIndexOrType(String name, String indexOrType) {
        if (indexOrType.indexOf(',') >= 0) {
            throw new IllegalArgumentException(name + " containing [,] not supported but got [" + indexOrType + "]");
@@ -251,7 +251,7 @@ public class CancelTests extends ReindexTestCase {
            assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")));

            refresh("dest");
-           assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified);
+           assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
        }, equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]"));
    }

@@ -293,7 +293,7 @@ public class CancelTests extends ReindexTestCase {
            (response, total, modified) -> {
                assertThat(response, matcher().created(modified).reasonCancelled(equalTo("by user request")).slices(hasSize(5)));
                refresh("dest");
-               assertHitCount(client().prepareSearch("dest").setTypes(TYPE).setSize(0).get(), modified);
+               assertHitCount(client().prepareSearch("dest").setSize(0).get(), modified);
            },
            equalTo("reindex from [" + INDEX + "] to [dest][" + TYPE + "]")
        );
@@ -83,25 +83,25 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
            client().prepareIndex("test", "test", "7").setSource("foo", "f")
        );

-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 7);

        // Deletes two docs that matches "foo:a"
        assertThat(deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).get(), matcher().deleted(2));
-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 5);

        // Deletes the two first docs with limit by size
        DeleteByQueryRequestBuilder request = deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).size(2).refresh(true);
        request.source().addSort("foo.keyword", SortOrder.ASC);
        assertThat(request.get(), matcher().deleted(2));
-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 3);

        // Deletes but match no docs
        assertThat(deleteByQuery().source("test").filter(termQuery("foo", "no_match")).refresh(true).get(), matcher().deleted(0));
-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 3);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 3);

        // Deletes all remaining docs
        assertThat(deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).get(), matcher().deleted(3));
-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 0);
    }

    public void testDeleteByQueryWithOneIndex() throws Exception {
@@ -319,7 +319,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
            client().prepareIndex("test", "test", "6").setSource("foo", "e"),
            client().prepareIndex("test", "test", "7").setSource("foo", "f")
        );
-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 7);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 7);

        int slices = randomSlices();
        int expectedSlices = expectedSliceStatuses(slices, "test");
@@ -329,14 +329,14 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
            deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true).setSlices(slices).get(),
            matcher().deleted(2).slices(hasSize(expectedSlices))
        );
-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 5);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 5);

        // Delete remaining docs
        assertThat(
            deleteByQuery().source("test").filter(QueryBuilders.matchAllQuery()).refresh(true).setSlices(slices).get(),
            matcher().deleted(5).slices(hasSize(expectedSlices))
        );
-       assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 0);
+       assertHitCount(client().prepareSearch("test").setSize(0).get(), 0);
    }

    public void testMultipleSources() throws Exception {
@@ -369,7 +369,7 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
        );

        for (String index : docs.keySet()) {
-           assertHitCount(client().prepareSearch(index).setTypes("test").setSize(0).get(), 0);
+           assertHitCount(client().prepareSearch(index).setSize(0).get(), 0);
        }

    }
@@ -125,7 +125,7 @@ public class ReindexBasicTests extends ReindexTestCase {
        // Use a small batch size so we have to use more than one batch
        copy.source().setSize(5);
        assertThat(copy.get(), matcher().created(max).batches(greaterThanOrEqualTo(max / 5)).slices(hasSize(expectedSlices)));
-       assertHitCount(client().prepareSearch("dest").setTypes("type").setSize(0).get(), max);
+       assertHitCount(client().prepareSearch("dest").setSize(0).get(), max);

        // Copy some of the docs
        int half = max / 2;
@@ -33,8 +33,6 @@
package org.opensearch.index.reindex;

import org.opensearch.common.xcontent.NamedXContentRegistry;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.action.search.RestSearchAction;
import org.opensearch.test.rest.FakeRestRequest;
import org.opensearch.test.rest.RestActionTestCase;
import org.junit.Before;
@@ -52,26 +50,6 @@ public class RestDeleteByQueryActionTests extends RestActionTestCase {
        controller().registerHandler(action);
    }

-   public void testTypeInPath() throws IOException {
-       RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
-           .withPath("/some_index/some_type/_delete_by_query")
-           .build();
-
-       // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-       verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
-
-       dispatchRequest(request);
-
-       // checks the type in the URL is propagated correctly to the request object
-       // only works after the request is dispatched, so its params are filled from url.
-       DeleteByQueryRequest dbqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY);
-       assertArrayEquals(new String[] { "some_type" }, dbqRequest.getDocTypes());
-
-       // RestDeleteByQueryAction itself doesn't check for a deprecated type usage
-       // checking here for a deprecation from its internal search request
-       assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
-   }
-
    public void testParseEmpty() throws IOException {
        final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build();
        DeleteByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY);
@ -44,7 +44,6 @@ import org.opensearch.test.rest.RestActionTestCase;
|
|||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
|
|
||||||
import static java.util.Collections.singletonMap;
|
import static java.util.Collections.singletonMap;
|
||||||
@ -103,30 +102,6 @@ public class RestReindexActionTests extends RestActionTestCase {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* test deprecation is logged if one or more types are used in source search request inside reindex
|
|
||||||
*/
|
|
||||||
public void testTypeInSource() throws IOException {
|
|
||||||
FakeRestRequest.Builder requestBuilder = new FakeRestRequest.Builder(xContentRegistry()).withMethod(Method.POST)
|
|
||||||
.withPath("/_reindex");
|
|
||||||
XContentBuilder b = JsonXContent.contentBuilder().startObject();
|
|
||||||
{
|
|
||||||
b.startObject("source");
|
|
||||||
{
|
|
||||||
b.field("type", randomFrom(Arrays.asList("\"t1\"", "[\"t1\", \"t2\"]", "\"_doc\"")));
|
|
||||||
}
|
|
||||||
b.endObject();
|
|
||||||
}
|
|
||||||
b.endObject();
|
|
||||||
requestBuilder.withContent(new BytesArray(BytesReference.bytes(b).toBytesRef()), XContentType.JSON);
|
|
||||||
|
|
||||||
// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
|
|
||||||
verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
|
|
||||||
|
|
||||||
dispatchRequest(requestBuilder.build());
|
|
||||||
assertWarnings(ReindexRequest.TYPES_DEPRECATION_MESSAGE);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* test deprecation is logged if a type is used in the destination index request inside reindex
|
* test deprecation is logged if a type is used in the destination index request inside reindex
|
||||||
*/
|
*/
|
||||||
@@ -33,8 +33,6 @@
 package org.opensearch.index.reindex;
 
 import org.opensearch.common.xcontent.NamedXContentRegistry;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.action.search.RestSearchAction;
 import org.opensearch.test.rest.FakeRestRequest;
 import org.opensearch.test.rest.RestActionTestCase;
 import org.junit.Before;
@@ -53,26 +51,6 @@ public class RestUpdateByQueryActionTests extends RestActionTestCase {
 controller().registerHandler(action);
 }
 
-public void testTypeInPath() throws IOException {
-RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.POST)
-.withPath("/some_index/some_type/_update_by_query")
-.build();
-
-// We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-verifyingClient.setExecuteLocallyVerifier((arg1, arg2) -> null);
-
-dispatchRequest(request);
-
-// checks the type in the URL is propagated correctly to the request object
-// only works after the request is dispatched, so its params are filled from url.
-UpdateByQueryRequest ubqRequest = action.buildRequest(request, DEFAULT_NAMED_WRITABLE_REGISTRY);
-assertArrayEquals(new String[] { "some_type" }, ubqRequest.getDocTypes());
-
-// RestUpdateByQueryAction itself doesn't check for a deprecated type usage
-// checking here for a deprecation from its internal search request
-assertWarnings(RestSearchAction.TYPES_DEPRECATION_MESSAGE);
-}
-
 public void testParseEmpty() throws IOException {
 final FakeRestRequest restRequest = new FakeRestRequest.Builder(new NamedXContentRegistry(emptyList())).build();
 UpdateByQueryRequest request = action.buildRequest(restRequest, DEFAULT_NAMED_WRITABLE_REGISTRY);
@@ -55,7 +55,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase {
 client().prepareIndex("test", "test", "3").setSource("foo", "b"),
 client().prepareIndex("test", "test", "4").setSource("foo", "c")
 );
-assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4);
+assertHitCount(client().prepareSearch("test").setSize(0).get(), 4);
 assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion());
 assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion());
 
@@ -95,7 +95,7 @@ public class UpdateByQueryBasicTests extends ReindexTestCase {
 client().prepareIndex("test", "test", "3").setSource("foo", "b"),
 client().prepareIndex("test", "test", "4").setSource("foo", "c")
 );
-assertHitCount(client().prepareSearch("test").setTypes("test").setSize(0).get(), 4);
+assertHitCount(client().prepareSearch("test").setSize(0).get(), 4);
 assertEquals(1, client().prepareGet("test", "test", "1").get().getVersion());
 assertEquals(1, client().prepareGet("test", "test", "4").get().getVersion());
 
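As context for the hunks above, here is a minimal sketch of the type-free request shape these assertions now rely on. It is not taken from the commit; it only reuses the OpenSearch SearchRequest/SearchSourceBuilder API that appears elsewhere in this diff, and the class and method names are hypothetical.

import org.opensearch.action.search.SearchRequest;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

// Minimal sketch: with types removed from the search internals, a request is
// scoped by index only. TypeFreeSearchExample and matchAllCountOn are illustrative names.
public final class TypeFreeSearchExample {
    public static SearchRequest matchAllCountOn(String index) {
        SearchRequest request = new SearchRequest(index);   // indices only, no types(...) call
        request.source(new SearchSourceBuilder()
            .query(QueryBuilders.matchAllQuery())
            .size(0));                                       // only the hit count is needed
        return request;
    }
}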
@@ -78,27 +78,25 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase {
 SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());
 assertEquals("/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
 searchRequest.indices("a");
-searchRequest.types("b");
-assertEquals("/a/b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+assertEquals("/a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
 searchRequest.indices("a", "b");
-searchRequest.types("c", "d");
-assertEquals("/a,b/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+assertEquals("/a,b/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
 searchRequest.indices("cat,");
-assertEquals("/cat%2C/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+assertEquals("/cat%2C/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
 searchRequest.indices("cat/");
-assertEquals("/cat%2F/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+assertEquals("/cat%2F/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
 searchRequest.indices("cat/", "dog");
-assertEquals("/cat%2F,dog/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+assertEquals("/cat%2F,dog/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
 // test a specific date math + all characters that need escaping.
 searchRequest.indices("<cat{now/d}>", "<>/{}|+:,");
 assertEquals(
-"/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/c,d/_search",
+"/%3Ccat%7Bnow%2Fd%7D%3E,%3C%3E%2F%7B%7D%7C%2B%3A%2C/_search",
 initialSearch(searchRequest, query, remoteVersion).getEndpoint()
 );
 
 // pass-through if already escaped.
 searchRequest.indices("%2f", "%3a");
-assertEquals("/%2f,%3a/c,d/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
+assertEquals("/%2f,%3a/_search", initialSearch(searchRequest, query, remoteVersion).getEndpoint());
 
 assertWarnings(DEPRECATED_URL_ENCODED_INDEX_WARNING);
 
@@ -107,20 +105,6 @@ public class RemoteRequestBuildersTests extends OpenSearchTestCase {
 expectBadStartRequest(searchRequest, "Index", ",", "%2fcat,");
 searchRequest.indices("%3ccat/");
 expectBadStartRequest(searchRequest, "Index", "/", "%3ccat/");
-
-searchRequest.indices("ok");
-searchRequest.types("cat,");
-expectBadStartRequest(searchRequest, "Type", ",", "cat,");
-searchRequest.types("cat,", "dog");
-expectBadStartRequest(searchRequest, "Type", ",", "cat,");
-searchRequest.types("dog", "cat,");
-expectBadStartRequest(searchRequest, "Type", ",", "cat,");
-searchRequest.types("cat/");
-expectBadStartRequest(searchRequest, "Type", "/", "cat/");
-searchRequest.types("cat/", "dog");
-expectBadStartRequest(searchRequest, "Type", "/", "cat/");
-searchRequest.types("dog", "cat/");
-expectBadStartRequest(searchRequest, "Type", "/", "cat/");
 }
 
 private void expectBadStartRequest(SearchRequest searchRequest, String type, String bad, String failed) {
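The expectations above reduce the remote search endpoint to "/<indices>/_search" with no type segment. Below is a minimal standalone sketch of that path layout; SearchPaths and searchPath are hypothetical names, and unlike the real initialSearch builder this sketch does not percent-encode index names.

import java.util.Arrays;
import java.util.stream.Collectors;

// Hypothetical helper mirroring the type-free endpoint layout asserted above:
// "/" + comma-separated indices + "/_search", never "/indices/types/_search".
// The real request builder additionally percent-encodes each index name.
final class SearchPaths {
    static String searchPath(String... indices) {
        if (indices == null || indices.length == 0) {
            return "/_search";
        }
        return "/" + Arrays.stream(indices).collect(Collectors.joining(",")) + "/_search";
    }

    public static void main(String[] args) {
        System.out.println(searchPath());         // -> /_search
        System.out.println(searchPath("a"));      // -> /a/_search
        System.out.println(searchPath("a", "b")); // -> /a,b/_search
    }
}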
@@ -99,7 +99,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 
 // searching for either of the terms should return both results since they collate to the same value
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false)
 .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -143,7 +142,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 
 // using sort mode = max, values B and C will be used for the sort
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false)
 .query(QueryBuilders.termQuery("collate", "a"))
@@ -159,7 +157,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 
 // same thing, using different sort mode that will use a for both docs
 request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false)
 .query(QueryBuilders.termQuery("collate", "a"))
@@ -207,7 +204,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 
 // searching for either of the terms should return both results since they collate to the same value
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false)
 .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -253,7 +249,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 );
 
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false)
 .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -300,7 +295,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 );
 
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false)
 .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -348,7 +342,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 );
 
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.ASC) // secondary sort
 // should kick in on
@@ -391,7 +384,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 );
 
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
 
 SearchResponse response = client().search(request).actionGet();
@@ -434,7 +426,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 );
 
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC).sort("id", SortOrder.DESC));
 
 SearchResponse response = client().search(request).actionGet();
@@ -472,7 +463,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 );
 
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(new SearchSourceBuilder().fetchSource(false).sort("collate", SortOrder.ASC));
 
 SearchResponse response = client().search(request).actionGet();
@@ -522,7 +512,6 @@ public class ICUCollationKeywordFieldMapperIT extends OpenSearchIntegTestCase {
 );
 
 SearchRequest request = new SearchRequest().indices(index)
-.types(type)
 .source(
 new SearchSourceBuilder().fetchSource(false)
 .query(QueryBuilders.termQuery("collate", randomBoolean() ? equivalent[0] : equivalent[1]))
@@ -27,27 +27,6 @@
 "description":"A comma-separated list of indices to restrict the results"
 }
 }
-},
-{
-"path":"/{index}/{type}/_count",
-"methods":[
-"POST",
-"GET"
-],
-"parts":{
-"index":{
-"type":"list",
-"description":"A comma-separated list of indices to restrict the results"
-},
-"type": {
-"type" : "list",
-"description" : "A comma-separated list of types to restrict the results"
-}
-},
-"deprecated": {
-"version" : "7.0.0",
-"description" : "Specifying types in urls has been deprecated"
-}
 }
 ]
 },
@@ -1,61 +0,0 @@
-setup:
-- do:
-indices.create:
-index: test
-- do:
-index:
-index: test
-id: 1
-body: { foo: bar }
-
-- do:
-indices.refresh:
-index: [test]
-
----
-"count with body":
-- do:
-count:
-index: test
-body:
-query:
-match:
-foo: bar
-
-- match: {count : 1}
-
-- do:
-count:
-index: test
-body:
-query:
-match:
-foo: test
-
-- match: {count : 0}
-
----
-"count with empty body":
-# empty body should default to match_all query
-- do:
-count:
-index: test
-body: { }
-
-- match: {count : 1}
-
-- do:
-count:
-index: test
-
-- match: {count : 1}
-
----
-"count body without query element":
-- do:
-catch: bad_request
-count:
-index: test
-body:
-match:
-foo: bar
@@ -379,14 +379,12 @@ public class TasksIT extends OpenSearchIntegTestCase {
 headers.put(Task.X_OPAQUE_ID, "my_id");
 headers.put("Foo-Header", "bar");
 headers.put("Custom-Task-Header", "my_value");
-assertSearchResponse(
-client().filterWithHeader(headers).prepareSearch("test").setTypes("doc").setQuery(QueryBuilders.matchAllQuery()).get()
-);
+assertSearchResponse(client().filterWithHeader(headers).prepareSearch("test").setQuery(QueryBuilders.matchAllQuery()).get());
 
 // the search operation should produce one main task
 List<TaskInfo> mainTask = findEvents(SearchAction.NAME, Tuple::v1);
 assertEquals(1, mainTask.size());
-assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], types[doc], search_type["));
+assertThat(mainTask.get(0).getDescription(), startsWith("indices[test], search_type["));
 assertThat(mainTask.get(0).getDescription(), containsString("\"query\":{\"match_all\""));
 assertTaskHeaders(mainTask.get(0));
 
@@ -829,14 +827,12 @@ public class TasksIT extends OpenSearchIntegTestCase {
 assertNoFailures(client().admin().indices().prepareRefresh(TaskResultsService.TASK_INDEX).get());
 
 SearchResponse searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
-.setTypes(TaskResultsService.TASK_TYPE)
 .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.action", taskInfo.getAction())))
 .get();
 
 assertEquals(1L, searchResponse.getHits().getTotalHits().value);
 
 searchResponse = client().prepareSearch(TaskResultsService.TASK_INDEX)
-.setTypes(TaskResultsService.TASK_TYPE)
 .setSource(SearchSourceBuilder.searchSource().query(QueryBuilders.termQuery("task.node", taskInfo.getTaskId().getNodeId())))
 .get();
 
@@ -159,11 +159,7 @@ public class BulkProcessorRetryIT extends OpenSearchIntegTestCase {
 
 client().admin().indices().refresh(new RefreshRequest()).get();
 
-SearchResponse results = client().prepareSearch(INDEX_NAME)
-.setTypes(TYPE_NAME)
-.setQuery(QueryBuilders.matchAllQuery())
-.setSize(0)
-.get();
+SearchResponse results = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchAllQuery()).setSize(0).get();
 
 if (rejectedExecutionExpected) {
 assertThat((int) results.getHits().getTotalHits().value, lessThanOrEqualTo(numberOfAsyncOps));
@@ -99,7 +99,7 @@ public class IndexActionIT extends OpenSearchIntegTestCase {
 }
 try {
 logger.debug("running search with a specific type");
-SearchResponse response = client().prepareSearch("test").setTypes("type").get();
+SearchResponse response = client().prepareSearch("test").get();
 if (response.getHits().getTotalHits().value != numOfDocs) {
 final String message = "Count is "
 + response.getHits().getTotalHits().value
@@ -331,7 +331,7 @@ public class OpenCloseIndexIT extends OpenSearchIntegTestCase {
 // check the index still contains the records that we indexed
 client().admin().indices().prepareOpen("test").execute().get();
 ensureGreen();
-SearchResponse searchResponse = client().prepareSearch().setTypes("type").setQuery(QueryBuilders.matchQuery("test", "init")).get();
+SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get();
 assertNoFailures(searchResponse);
 assertHitCount(searchResponse, docs);
 }
@@ -99,7 +99,6 @@ public class BooleanTermsIT extends OpenSearchIntegTestCase {
 
 public void testSingleValueField() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .addAggregation(terms("terms").field(SINGLE_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())))
 .get();
 
@@ -132,7 +131,6 @@ public class BooleanTermsIT extends OpenSearchIntegTestCase {
 
 public void testMultiValueField() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .addAggregation(terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values())))
 .get();
 
@@ -165,7 +163,6 @@ public class BooleanTermsIT extends OpenSearchIntegTestCase {
 
 public void testUnmapped() throws Exception {
 SearchResponse response = client().prepareSearch("idx_unmapped")
-.setTypes("type")
 .addAggregation(
 terms("terms").field(SINGLE_VALUED_FIELD_NAME).size(between(1, 5)).collectMode(randomFrom(SubAggCollectionMode.values()))
 )
@@ -1788,7 +1788,6 @@ public class DateHistogramIT extends OpenSearchIntegTestCase {
 private void assertMultiSortResponse(int[] expectedDays, BucketOrder... order) {
 ZonedDateTime[] expectedKeys = Arrays.stream(expectedDays).mapToObj(d -> date(1, d)).toArray(ZonedDateTime[]::new);
 SearchResponse response = client().prepareSearch("sort_idx")
-.setTypes("type")
 .addAggregation(
 dateHistogram("histo").field("date")
 .dateHistogramInterval(DateHistogramInterval.DAY)
@@ -121,7 +121,6 @@ public class DiversifiedSamplerIT extends OpenSearchIntegTestCase {
 // statement
 boolean asc = randomBoolean();
 SearchResponse response = client().prepareSearch("test")
-.setTypes("book")
 .setSearchType(SearchType.QUERY_THEN_FETCH)
 .addAggregation(
 terms("genres").field("genre")
@@ -938,7 +938,6 @@ public class DoubleTermsIT extends AbstractTermsTestCase {
 
 private void assertMultiSortResponse(double[] expectedKeys, BucketOrder... order) {
 SearchResponse response = client().prepareSearch("sort_idx")
-.setTypes("multi_sort_type")
 .addAggregation(
 terms("terms").field(SINGLE_VALUED_FIELD_NAME)
 .collectMode(randomFrom(SubAggCollectionMode.values()))
@@ -1391,7 +1391,6 @@ public class HistogramIT extends OpenSearchIntegTestCase {
 
 private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) {
 SearchResponse response = client().prepareSearch("sort_idx")
-.setTypes("type")
 .addAggregation(
 histogram("histo").field(SINGLE_VALUED_FIELD_NAME)
 .interval(1)
@@ -886,7 +886,6 @@ public class LongTermsIT extends AbstractTermsTestCase {
 
 private void assertMultiSortResponse(long[] expectedKeys, BucketOrder... order) {
 SearchResponse response = client().prepareSearch("sort_idx")
-.setTypes("multi_sort_type")
 .addAggregation(
 terms("terms").field(SINGLE_VALUED_FIELD_NAME)
 .collectMode(randomFrom(SubAggCollectionMode.values()))
@@ -332,7 +332,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
 private void testMinDocCountOnTerms(String field, Script script, BucketOrder order, String include, boolean retry) throws Exception {
 // all terms
 final SearchResponse allTermsResponse = client().prepareSearch("idx")
-.setTypes("type")
 .setSize(0)
 .setQuery(QUERY)
 .addAggregation(
@@ -352,7 +351,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
 for (long minDocCount = 0; minDocCount < 20; ++minDocCount) {
 final int size = randomIntBetween(1, cardinality + 2);
 final SearchRequest request = client().prepareSearch("idx")
-.setTypes("type")
 .setSize(0)
 .setQuery(QUERY)
 .addAggregation(
@@ -407,7 +405,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
 private void testMinDocCountOnHistogram(BucketOrder order) throws Exception {
 final int interval = randomIntBetween(1, 3);
 final SearchResponse allResponse = client().prepareSearch("idx")
-.setTypes("type")
 .setSize(0)
 .setQuery(QUERY)
 .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(0))
@@ -417,7 +414,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
 
 for (long minDocCount = 0; minDocCount < 50; ++minDocCount) {
 final SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setSize(0)
 .setQuery(QUERY)
 .addAggregation(histogram("histo").field("d").interval(interval).order(order).minDocCount(minDocCount))
@@ -428,7 +424,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
 
 private void testMinDocCountOnDateHistogram(BucketOrder order) throws Exception {
 final SearchResponse allResponse = client().prepareSearch("idx")
-.setTypes("type")
 .setSize(0)
 .setQuery(QUERY)
 .addAggregation(
@@ -440,7 +435,6 @@ public class MinDocCountIT extends AbstractTermsTestCase {
 
 for (long minDocCount = 0; minDocCount < 50; ++minDocCount) {
 final SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setSize(0)
 .setQuery(QUERY)
 .addAggregation(
@@ -478,7 +478,6 @@ public class NestedIT extends OpenSearchIntegTestCase {
 indexRandom(true, indexRequests);
 
 SearchResponse response = client().prepareSearch("idx2")
-.setTypes("provider")
 .addAggregation(
 terms("startDate").field("dates.month.start")
 .subAggregation(
@@ -586,7 +585,6 @@ public class NestedIT extends OpenSearchIntegTestCase {
 refresh();
 
 SearchResponse response = client().prepareSearch("idx4")
-.setTypes("product")
 .addAggregation(
 terms("category").field("categories")
 .subAggregation(nested("property", "property").subAggregation(terms("property_id").field("property.id")))
@@ -120,7 +120,6 @@ public class SamplerIT extends OpenSearchIntegTestCase {
 // statement
 boolean asc = randomBoolean();
 SearchResponse response = client().prepareSearch("test")
-.setTypes("book")
 .setSearchType(SearchType.QUERY_THEN_FETCH)
 .addAggregation(
 terms("genres").field("genre")
@@ -51,7 +51,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))
@@ -76,7 +75,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key")
@@ -106,7 +104,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key")
@@ -136,7 +133,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setRouting(routing1)
 .setQuery(matchAllQuery())
 .addAggregation(
@@ -166,7 +162,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))
@@ -191,7 +186,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))
@@ -216,7 +210,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key")
@@ -245,7 +238,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key")
@@ -275,7 +267,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setRouting(routing1)
 .setQuery(matchAllQuery())
 .addAggregation(
@@ -305,7 +296,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))
@@ -330,7 +320,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.count(false))
@@ -355,7 +344,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key")
@@ -384,7 +372,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key")
@@ -413,7 +400,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setRouting(routing1)
 .setQuery(matchAllQuery())
 .addAggregation(
@@ -443,7 +429,6 @@ public class ShardSizeTermsIT extends ShardSizeTestCase {
 indexData();
 
 SearchResponse response = client().prepareSearch("idx")
-.setTypes("type")
 .setQuery(matchAllQuery())
 .addAggregation(
 terms("keys").field("key").size(3).collectMode(randomFrom(SubAggCollectionMode.values())).order(BucketOrder.key(true))
@@ -148,11 +148,9 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
 // Use significant_text on text fields but occasionally run with alternative of
 // significant_terms on legacy fieldData=true too.
 request = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD)));
 } else {
 request = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD)));
 }
 
@@ -245,13 +243,11 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
 SearchRequestBuilder request;
 if (randomBoolean()) {
 request = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(
 terms("class").field(CLASS_FIELD).subAggregation(significantTerms("sig_terms").field(TEXT_FIELD).minDocCount(1))
 );
 } else {
 request = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(terms("class").field(CLASS_FIELD).subAggregation(significantText("sig_terms", TEXT_FIELD).minDocCount(1)));
 }
 
@@ -282,7 +278,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
 SearchRequestBuilder request1;
 if (useSigText) {
 request1 = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(
 terms("class").field(CLASS_FIELD)
 .subAggregation(
@@ -292,7 +287,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
 );
 } else {
 request1 = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(
 terms("class").field(CLASS_FIELD)
 .subAggregation(
@@ -309,7 +303,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
 SearchRequestBuilder request2;
 if (useSigText) {
 request2 = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(
 filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation(
 significantText("sig_terms", TEXT_FIELD).minDocCount(1)
@@ -326,7 +319,6 @@ public class SignificantTermsSignificanceScoreIT extends OpenSearchIntegTestCase
 );
 } else {
 request2 = client().prepareSearch(INDEX_NAME)
-.setTypes(DOC_TYPE)
 .addAggregation(
 filter("0", QueryBuilders.termQuery(CLASS_FIELD, "0")).subAggregation(
 significantTerms("sig_terms").field(TEXT_FIELD)
@@ -303,7 +303,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -317,7 +316,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -337,7 +335,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -351,7 +348,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -372,7 +368,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int shardSize = randomIntBetween(size, size * 2);
 
 SearchResponse testResponse = client().prepareSearch("idx_with_routing")
-.setTypes("type")
 .setRouting(String.valueOf(between(1, numRoutingValues)))
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
@@ -393,7 +388,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -408,7 +402,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -429,7 +422,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -444,7 +436,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -465,7 +456,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -480,7 +470,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -501,7 +490,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -517,7 +505,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -539,7 +526,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -555,7 +541,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(STRING_FIELD_NAME)
@@ -577,7 +562,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(LONG_FIELD_NAME)
@@ -591,7 +575,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(LONG_FIELD_NAME)
@@ -611,7 +594,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 int size = randomIntBetween(1, 20);
 int shardSize = randomIntBetween(size, size * 2);
 SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
-.setTypes("type")
 .addAggregation(
 terms("terms").executionHint(randomExecutionHint())
 .field(LONG_FIELD_NAME)
@@ -625,7 +607,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
 assertSearchResponse(accurateResponse);
 
 SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -646,7 +627,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_with_routing")
|
SearchResponse testResponse = client().prepareSearch("idx_with_routing")
|
||||||
.setTypes("type")
|
|
||||||
.setRouting(String.valueOf(between(1, numRoutingValues)))
|
.setRouting(String.valueOf(between(1, numRoutingValues)))
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
@ -667,7 +647,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -682,7 +661,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -703,7 +681,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -718,7 +695,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -739,7 +715,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -754,7 +729,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -775,7 +749,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -791,7 +764,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -813,7 +785,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -829,7 +800,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(LONG_FIELD_NAME)
|
.field(LONG_FIELD_NAME)
|
||||||
@ -851,7 +821,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx")
|
SearchResponse accurateResponse = client().prepareSearch("idx")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -865,7 +834,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx")
|
SearchResponse testResponse = client().prepareSearch("idx")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -885,7 +853,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -899,7 +866,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -920,7 +886,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_with_routing")
|
SearchResponse testResponse = client().prepareSearch("idx_with_routing")
|
||||||
.setTypes("type")
|
|
||||||
.setRouting(String.valueOf(between(1, numRoutingValues)))
|
.setRouting(String.valueOf(between(1, numRoutingValues)))
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
@ -941,7 +906,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -956,7 +920,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -977,7 +940,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -992,7 +954,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -1013,7 +974,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -1028,7 +988,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -1049,7 +1008,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -1065,7 +1023,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -1087,7 +1044,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
int size = randomIntBetween(1, 20);
|
int size = randomIntBetween(1, 20);
|
||||||
int shardSize = randomIntBetween(size, size * 2);
|
int shardSize = randomIntBetween(size, size * 2);
|
||||||
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse accurateResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -1103,7 +1059,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
assertSearchResponse(accurateResponse);
|
assertSearchResponse(accurateResponse);
|
||||||
|
|
||||||
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
SearchResponse testResponse = client().prepareSearch("idx_single_shard")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(DOUBLE_FIELD_NAME)
|
.field(DOUBLE_FIELD_NAME)
|
||||||
@ -1128,7 +1083,6 @@ public class TermsDocCountErrorIT extends OpenSearchIntegTestCase {
|
|||||||
*/
|
*/
|
||||||
public void testFixedDocs() throws Exception {
|
public void testFixedDocs() throws Exception {
|
||||||
SearchResponse response = client().prepareSearch("idx_fixed_docs_0", "idx_fixed_docs_1", "idx_fixed_docs_2")
|
SearchResponse response = client().prepareSearch("idx_fixed_docs_0", "idx_fixed_docs_1", "idx_fixed_docs_2")
|
||||||
.setTypes("type")
|
|
||||||
.addAggregation(
|
.addAggregation(
|
||||||
terms("terms").executionHint(randomExecutionHint())
|
terms("terms").executionHint(randomExecutionHint())
|
||||||
.field(STRING_FIELD_NAME)
|
.field(STRING_FIELD_NAME)
|
||||||
|
@@ -368,7 +368,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 private void runTestFieldWithPartitionedFiltering(String field) throws Exception {
 // Find total number of unique terms
 SearchResponse allResponse = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(terms("terms").field(field).size(10000).collectMode(randomFrom(SubAggCollectionMode.values())))
     .get();
 assertSearchResponse(allResponse);
@@ -382,7 +381,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 Set<String> foundTerms = new HashSet<>();
 for (int partition = 0; partition < numPartitions; partition++) {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").field(field)
             .includeExclude(new IncludeExclude(partition, numPartitions))
@@ -402,7 +400,6 @@ public class StringTermsIT extends AbstractTermsTestCase {

 public void testSingleValuedFieldWithValueScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -428,7 +425,6 @@ public class StringTermsIT extends AbstractTermsTestCase {

 public void testMultiValuedFieldWithValueScriptNotUnique() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(MULTI_VALUED_FIELD_NAME)
@@ -452,7 +448,6 @@ public class StringTermsIT extends AbstractTermsTestCase {

 public void testMultiValuedScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .script(
@@ -488,7 +483,6 @@ public class StringTermsIT extends AbstractTermsTestCase {

 public void testMultiValuedFieldWithValueScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(MULTI_VALUED_FIELD_NAME)
@@ -537,7 +531,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 );

 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script)
     )
@@ -567,7 +560,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 );

 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").collectMode(randomFrom(SubAggCollectionMode.values())).executionHint(randomExecutionHint()).script(script)
     )
@@ -590,7 +582,6 @@ public class StringTermsIT extends AbstractTermsTestCase {

 public void testScriptMultiValued() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
             .executionHint(randomExecutionHint())
@@ -626,7 +617,6 @@ public class StringTermsIT extends AbstractTermsTestCase {

 public void testPartiallyUnmapped() throws Exception {
 SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -652,7 +642,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 public void testStringTermsNestedIntoPerBucketAggregator() throws Exception {
 // no execution hint so that the logic that decides whether or not to use ordinals is executed
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         filter("filter", termQuery(MULTI_VALUED_FIELD_NAME, "val3")).subAggregation(
             terms("terms").field(MULTI_VALUED_FIELD_NAME).collectMode(randomFrom(SubAggCollectionMode.values()))
@@ -681,7 +670,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 boolean asc = true;
 try {
 client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -710,7 +698,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 public void testSingleValuedFieldOrderedBySingleBucketSubAggregationAsc() throws Exception {
 boolean asc = randomBoolean();
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("tags").executionHint(randomExecutionHint())
             .field("tag")
@@ -749,7 +736,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 public void testSingleValuedFieldOrderedBySubAggregationAscMultiHierarchyLevels() throws Exception {
 boolean asc = randomBoolean();
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("tags").executionHint(randomExecutionHint())
             .field("tag")
@@ -813,7 +799,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 String statsName = statsNameBuilder.toString();
 boolean asc = randomBoolean();
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("tags").executionHint(randomExecutionHint())
             .field("tag")
@@ -877,7 +862,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 String statsName = statsNameBuilder.toString();
 boolean asc = randomBoolean();
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("tags").executionHint(randomExecutionHint())
             .field("tag")
@@ -936,7 +920,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 for (String index : Arrays.asList("idx", "idx_unmapped")) {
 try {
 client().prepareSearch(index)
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -957,7 +940,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 for (String index : Arrays.asList("idx", "idx_unmapped")) {
 try {
 client().prepareSearch(index)
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -982,7 +964,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 for (String index : Arrays.asList("idx", "idx_unmapped")) {
 try {
 SearchResponse response = client().prepareSearch(index)
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -1008,7 +989,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 for (String index : Arrays.asList("idx", "idx_unmapped")) {
 try {
 client().prepareSearch(index)
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -1033,7 +1013,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 public void testSingleValuedFieldOrderedByMultiValueSubAggregationAsc() throws Exception {
 boolean asc = true;
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -1066,7 +1045,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 public void testSingleValuedFieldOrderedByMultiValueSubAggregationDesc() throws Exception {
 boolean asc = false;
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -1100,7 +1078,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 public void testSingleValuedFieldOrderedByMultiValueExtendedStatsAsc() throws Exception {
 boolean asc = true;
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -1134,7 +1111,6 @@ public class StringTermsIT extends AbstractTermsTestCase {
 public void testSingleValuedFieldOrderedByStatsAggAscWithTermsSubAgg() throws Exception {
 boolean asc = true;
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").executionHint(randomExecutionHint())
             .field(SINGLE_VALUED_FIELD_NAME)
@@ -1253,7 +1229,6 @@ public class StringTermsIT extends AbstractTermsTestCase {

 public void testIndexMetaField() throws Exception {
 SearchResponse response = client().prepareSearch("idx", "empty_bucket_idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").collectMode(randomFrom(SubAggCollectionMode.values()))
             .executionHint(randomExecutionHint())
@@ -204,7 +204,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testUnmapped() throws Exception {
 SearchResponse response = client().prepareSearch("idx_unmapped")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value"))
     .get();

@@ -218,7 +217,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testPartiallyUnmapped() throws Exception {
 SearchResponse response = client().prepareSearch("idx", "idx_unmapped")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value"))
     .get();

@@ -232,7 +230,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testSingleValuedString() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_value"))
     .get();

@@ -246,7 +243,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testSingleValuedNumeric() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()))
     .get();

@@ -289,7 +285,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testSingleValuedNumericHashed() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(singleNumericField()))
     .get();

@@ -303,7 +298,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testMultiValuedString() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field("str_values"))
     .get();

@@ -317,7 +311,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testMultiValuedNumeric() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(false)))
     .get();

@@ -331,7 +324,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testMultiValuedNumericHashed() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).field(multiNumericField(true)))
     .get();

@@ -345,7 +337,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testSingleValuedStringScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         cardinality("cardinality").precisionThreshold(precisionThreshold)
             .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_value'].value", emptyMap()))
@@ -362,7 +353,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testMultiValuedStringScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         cardinality("cardinality").precisionThreshold(precisionThreshold)
             .script(new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc['str_values']", emptyMap()))
@@ -380,7 +370,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
 public void testSingleValuedNumericScript() throws Exception {
 Script script = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "doc[' + singleNumericField() + '].value", emptyMap());
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script))
     .get();

@@ -400,7 +389,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {
 Collections.emptyMap()
 );
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(cardinality("cardinality").precisionThreshold(precisionThreshold).script(script))
     .get();

@@ -414,7 +402,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testSingleValuedStringValueScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         cardinality("cardinality").precisionThreshold(precisionThreshold)
             .field("str_value")
@@ -432,7 +419,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testMultiValuedStringValueScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         cardinality("cardinality").precisionThreshold(precisionThreshold)
             .field("str_values")
@@ -450,7 +436,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testSingleValuedNumericValueScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         cardinality("cardinality").precisionThreshold(precisionThreshold)
             .field(singleNumericField())
@@ -468,7 +453,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testMultiValuedNumericValueScript() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         cardinality("cardinality").precisionThreshold(precisionThreshold)
             .field(multiNumericField(false))
@@ -486,7 +470,6 @@ public class CardinalityIT extends OpenSearchIntegTestCase {

 public void testAsSubAgg() throws Exception {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         terms("terms").field("str_value")
             .collectMode(randomFrom(SubAggCollectionMode.values()))
@@ -241,7 +241,6 @@ public class SerialDiffIT extends OpenSearchIntegTestCase {

 public void testBasicDiff() {
 SearchResponse response = client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         histogram("histo").field(INTERVAL_FIELD)
             .interval(interval)
@@ -286,7 +285,6 @@ public class SerialDiffIT extends OpenSearchIntegTestCase {
 public void testInvalidLagSize() {
 try {
 client().prepareSearch("idx")
-    .setTypes("type")
     .addAggregation(
         histogram("histo").field(INTERVAL_FIELD)
             .interval(interval)
@@ -192,7 +192,6 @@ public class SearchWithRandomIOExceptionsIT extends OpenSearchIntegTestCase {
 int expectedResults = added[docToQuery] ? 1 : 0;
 logger.info("Searching for [test:{}]", English.intToEnglish(docToQuery));
 SearchResponse searchResponse = client().prepareSearch()
-    .setTypes("type")
     .setQuery(QueryBuilders.matchQuery("test", English.intToEnglish(docToQuery)))
     .setSize(expectedResults)
     .get();
@@ -202,7 +201,6 @@ public class SearchWithRandomIOExceptionsIT extends OpenSearchIntegTestCase {
 }
 // check match all
 searchResponse = client().prepareSearch()
-    .setTypes("type")
     .setQuery(QueryBuilders.matchAllQuery())
     .setSize(numCreated + numInitialDocs)
     .addSort("_uid", SortOrder.ASC)
@@ -239,10 +237,7 @@ public class SearchWithRandomIOExceptionsIT extends OpenSearchIntegTestCase {
 );
 client().admin().indices().prepareOpen("test").execute().get();
 ensureGreen();
-SearchResponse searchResponse = client().prepareSearch()
-    .setTypes("type")
-    .setQuery(QueryBuilders.matchQuery("test", "init"))
-    .get();
+SearchResponse searchResponse = client().prepareSearch().setQuery(QueryBuilders.matchQuery("test", "init")).get();
 assertNoFailures(searchResponse);
 assertHitCount(searchResponse, numInitialDocs);
 }
@@ -71,7 +71,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {

 public void testThatCustomHighlightersAreSupported() throws IOException {
 SearchResponse searchResponse = client().prepareSearch("test")
-    .setTypes("test")
     .setQuery(QueryBuilders.matchAllQuery())
     .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom"))
     .get();
@@ -86,7 +85,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {
 highlightConfig.options(options);

 SearchResponse searchResponse = client().prepareSearch("test")
-    .setTypes("test")
     .setQuery(QueryBuilders.matchAllQuery())
     .highlighter(new HighlightBuilder().field(highlightConfig))
     .get();
@@ -100,7 +98,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {
 options.put("myGlobalOption", "someValue");

 SearchResponse searchResponse = client().prepareSearch("test")
-    .setTypes("test")
     .setQuery(QueryBuilders.matchAllQuery())
     .highlighter(new HighlightBuilder().field("name").highlighterType("test-custom").options(options))
     .get();
@@ -111,7 +108,6 @@ public class CustomHighlighterSearchIT extends OpenSearchIntegTestCase {

 public void testThatCustomHighlighterReceivesFieldsInOrder() throws Exception {
 SearchResponse searchResponse = client().prepareSearch("test")
-    .setTypes("test")
     .setQuery(QueryBuilders.boolQuery().must(QueryBuilders.matchAllQuery()).should(QueryBuilders.termQuery("name", "arbitrary")))
     .highlighter(
         new HighlightBuilder().highlighterType("test-custom")
@@ -2171,7 +2171,7 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
 index("test", "type1", "2", "text", new String[] { "", text2 });
 refresh();

-IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("2");
+IdsQueryBuilder idsQueryBuilder = QueryBuilders.idsQuery().addIds("2");
 field.highlighterType("plain");
 response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get();
 assertNotHighlighted(response, 0, "text");
@@ -2188,7 +2188,7 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
 // But if the field was actually empty then you should get no highlighting field
 index("test", "type1", "3", "text", new String[] {});
 refresh();
-idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("3");
+idsQueryBuilder = QueryBuilders.idsQuery().addIds("3");
 field.highlighterType("plain");
 response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get();
 assertNotHighlighted(response, 0, "text");
@@ -2205,7 +2205,7 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {
 index("test", "type1", "4");
 refresh();

-idsQueryBuilder = QueryBuilders.idsQuery("type1").addIds("4");
+idsQueryBuilder = QueryBuilders.idsQuery().addIds("4");
 field.highlighterType("plain");
 response = client().prepareSearch("test").setQuery(idsQueryBuilder).highlighter(new HighlightBuilder().field(field)).get();
 assertNotHighlighted(response, 0, "text");
@@ -3042,7 +3042,6 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {

 for (String highlighter : ALL_TYPES) {
 SearchResponse response = client().prepareSearch("test")
-    .setTypes("typename")
     .setQuery(matchQuery("foo", "test"))
     .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false))
     .get();
@@ -3071,7 +3070,6 @@ public class HighlighterSearchIT extends OpenSearchIntegTestCase {

 for (String highlighter : ALL_TYPES) {
 SearchResponse response = client().prepareSearch("filtered_alias")
-    .setTypes("typename")
     .setQuery(matchQuery("foo", "test"))
     .highlighter(new HighlightBuilder().field("foo").highlighterType(highlighter).requireFieldMatch(false))
     .get();
@@ -737,11 +737,7 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
  .setRefreshPolicy(IMMEDIATE)
  .get();

- SearchResponse searchResponse = client().prepareSearch("my-index")
- .setTypes("my-type1")
- .addStoredField("field1")
- .addStoredField("_routing")
- .get();
+ SearchResponse searchResponse = client().prepareSearch("my-index").addStoredField("field1").addStoredField("_routing").get();

  assertThat(searchResponse.getHits().getTotalHits().value, equalTo(1L));
  assertThat(searchResponse.getHits().getAt(0).field("field1"), nullValue());
@@ -755,7 +751,7 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
  .get();

  assertFailures(
- client().prepareSearch("my-index").setTypes("my-type1").addStoredField("field1"),
+ client().prepareSearch("my-index").addStoredField("field1"),
  RestStatus.BAD_REQUEST,
  containsString("field [field1] isn't a leaf field")
  );
@@ -838,7 +834,6 @@ public class SearchFieldsIT extends OpenSearchIntegTestCase {
  indexRandom(true, client().prepareIndex("test", "type", "1").setSource("test_field", "foobar"));
  refresh();
  SearchResponse searchResponse = client().prepareSearch("test")
- .setTypes("type")
  .setSource(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()).docValueField("test_field"))
  .get();
  assertHitCount(searchResponse, 1);
@@ -220,7 +220,6 @@ public class GeoDistanceIT extends OpenSearchIntegTestCase {
  String name = "TestPosition";

  search.setQuery(QueryBuilders.matchAllQuery())
- .setTypes("type1")
  .addAggregation(
  AggregationBuilders.geoDistance(name, new GeoPoint(tgt_lat, tgt_lon))
  .field("location")
@@ -640,7 +640,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .include(true)
  .minTermFreq(1)
  .minDocFreq(1);
- SearchResponse mltResponse = client().prepareSearch().setTypes("type1").setQuery(queryBuilder).get();
+ SearchResponse mltResponse = client().prepareSearch().setQuery(queryBuilder).get();
  assertHitCount(mltResponse, 3L);
  }

@@ -672,7 +672,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .minDocFreq(1)
  .maxQueryTerms(max_query_terms)
  .minimumShouldMatch("0%");
- SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
+ SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
  assertSearchResponse(response);
  assertHitCount(response, max_query_terms);
  }
@@ -705,7 +705,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .minDocFreq(1)
  .minimumShouldMatch(minimumShouldMatch);
  logger.info("Testing with minimum_should_match = {}", minimumShouldMatch);
- SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
+ SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
  assertSearchResponse(response);
  if (minimumShouldMatch.equals("0%")) {
  assertHitCount(response, 10);
@@ -735,7 +735,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .minDocFreq(0)
  .maxQueryTerms(100)
  .minimumShouldMatch("100%"); // strict all terms must match!
- SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
+ SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
  assertSearchResponse(response);
  assertHitCount(response, 1);
  }
@@ -809,7 +809,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .minDocFreq(0)
  .maxQueryTerms(100)
  .minimumShouldMatch("0%");
- SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
+ SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
  assertSearchResponse(response);
  assertHitCount(response, numFields);

@@ -824,7 +824,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .include(true)
  .minimumShouldMatch("0%");

- response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
+ response = client().prepareSearch("test").setQuery(mltQuery).get();
  assertSearchResponse(response);
  assertHitCount(response, numFields - (i + 1));
  }
@@ -848,7 +848,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .minDocFreq(0)
  .include(true)
  .minimumShouldMatch("1%");
- SearchResponse response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
+ SearchResponse response = client().prepareSearch("test").setQuery(mltQuery).get();
  assertSearchResponse(response);
  assertHitCount(response, 2);

@@ -856,7 +856,7 @@ public class MoreLikeThisIT extends OpenSearchIntegTestCase {
  .minDocFreq(0)
  .include(true)
  .minimumShouldMatch("1%");
- response = client().prepareSearch("test").setTypes("type1").setQuery(mltQuery).get();
+ response = client().prepareSearch("test").setQuery(mltQuery).get();
  assertSearchResponse(response);
  assertHitCount(response, 1);
  }
@@ -548,7 +548,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
  refresh();

  SearchResponse searchResponse = client().prepareSearch("test")
- .setTypes("type1")
  .setQuery(QueryBuilders.matchAllQuery())
  .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.ASC).setNestedPath("nested1"))
  .get();
@@ -562,7 +561,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
  assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("4"));

  searchResponse = client().prepareSearch("test")
- .setTypes("type1")
  .setQuery(QueryBuilders.matchAllQuery())
  .addSort(SortBuilders.fieldSort("nested1.field1").order(SortOrder.DESC).setNestedPath("nested1"))
  .get();
@@ -658,7 +656,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
  refresh();

  SearchRequestBuilder searchRequestBuilder = client().prepareSearch("test")
- .setTypes("type1")
  .setQuery(QueryBuilders.matchAllQuery())
  .addSort(
  SortBuilders.fieldSort("nested1.field1")
@@ -683,7 +680,6 @@ public class SimpleNestedIT extends OpenSearchIntegTestCase {
  assertThat(searchResponse.getHits().getHits()[2].getSortValues()[0].toString(), equalTo("10"));

  searchRequestBuilder = client().prepareSearch("test")
- .setTypes("type1")
  .setQuery(QueryBuilders.matchAllQuery())
  .addSort(
  SortBuilders.fieldSort("nested1.field1")
@@ -516,7 +516,6 @@ public class QueryProfilerIT extends OpenSearchIntegTestCase {
  SearchResponse resp = client().prepareSearch()
  .setQuery(q)
  .setIndices("test")
- .setTypes("type1")
  .setProfile(true)
  .setSearchType(SearchType.QUERY_THEN_FETCH)
  .get();
@@ -45,7 +45,6 @@ import org.opensearch.action.search.SearchPhaseExecutionException;
  import org.opensearch.action.search.SearchResponse;
  import org.opensearch.action.search.SearchType;
  import org.opensearch.bootstrap.JavaVersion;
- import org.opensearch.common.Strings;
  import org.opensearch.common.document.DocumentField;
  import org.opensearch.common.lucene.search.SpanBooleanQueryRewriteWithMaxClause;
  import org.opensearch.common.regex.Regex;
@@ -124,7 +123,6 @@ import static org.opensearch.index.query.QueryBuilders.spanTermQuery;
  import static org.opensearch.index.query.QueryBuilders.termQuery;
  import static org.opensearch.index.query.QueryBuilders.termsLookupQuery;
  import static org.opensearch.index.query.QueryBuilders.termsQuery;
- import static org.opensearch.index.query.QueryBuilders.typeQuery;
  import static org.opensearch.index.query.QueryBuilders.wildcardQuery;
  import static org.opensearch.index.query.QueryBuilders.wrapperQuery;
  import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked;
@@ -557,23 +555,6 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
  assertHitCount(searchResponse, 0L);
  }

- public void testTypeFilter() throws Exception {
- assertAcked(prepareCreate("test"));
- indexRandom(
- true,
- client().prepareIndex("test", "type1", "1").setSource("field1", "value1"),
- client().prepareIndex("test", "type1", "2").setSource("field1", "value1")
- );
-
- assertHitCount(client().prepareSearch().setQuery(typeQuery("type1")).get(), 2L);
- assertHitCount(client().prepareSearch().setQuery(typeQuery("type2")).get(), 0L);
-
- assertHitCount(client().prepareSearch().setTypes("type1").setQuery(matchAllQuery()).get(), 2L);
- assertHitCount(client().prepareSearch().setTypes("type2").setQuery(matchAllQuery()).get(), 0L);
-
- assertHitCount(client().prepareSearch().setTypes("type1", "type2").setQuery(matchAllQuery()).get(), 2L);
- }
-
  public void testIdsQueryTestsIdIndexed() throws Exception {
  assertAcked(client().admin().indices().prepareCreate("test"));

@@ -584,29 +565,19 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
  client().prepareIndex("test", "type1", "3").setSource("field1", "value3")
  );

- SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery("type1").addIds("1", "3"))).get();
+ SearchResponse searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get();
  assertHitCount(searchResponse, 2L);
  assertSearchHits(searchResponse, "1", "3");

- // no type
- searchResponse = client().prepareSearch().setQuery(constantScoreQuery(idsQuery().addIds("1", "3"))).get();
- assertHitCount(searchResponse, 2L);
- assertSearchHits(searchResponse, "1", "3");
-
- searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("1", "3")).get();
- assertHitCount(searchResponse, 2L);
- assertSearchHits(searchResponse, "1", "3");
-
- // no type
  searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "3")).get();
  assertHitCount(searchResponse, 2L);
  assertSearchHits(searchResponse, "1", "3");

- searchResponse = client().prepareSearch().setQuery(idsQuery("type1").addIds("7", "10")).get();
+ searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("7", "10")).get();
  assertHitCount(searchResponse, 0L);

  // repeat..., with terms
- searchResponse = client().prepareSearch().setTypes("type1").setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get();
+ searchResponse = client().prepareSearch().setQuery(constantScoreQuery(termsQuery("_id", "1", "3"))).get();
  assertHitCount(searchResponse, 2L);
  assertSearchHits(searchResponse, "1", "3");
  }
@@ -1298,7 +1269,7 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
  client().prepareIndex("test", "_doc", "3").setSource("field1", "value3").get();
  refresh();

- SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery("_doc").addIds("1", "2")).get();
+ SearchResponse searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2")).get();
  assertHitCount(searchResponse, 2L);
  assertThat(searchResponse.getHits().getHits().length, equalTo(2));

@@ -1310,11 +1281,11 @@ public class SearchQueryIT extends OpenSearchIntegTestCase {
  assertHitCount(searchResponse, 2L);
  assertThat(searchResponse.getHits().getHits().length, equalTo(2));

- searchResponse = client().prepareSearch().setQuery(idsQuery(Strings.EMPTY_ARRAY).addIds("1")).get();
+ searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1")).get();
  assertHitCount(searchResponse, 1L);
  assertThat(searchResponse.getHits().getHits().length, equalTo(1));

- searchResponse = client().prepareSearch().setQuery(idsQuery("type1", "type2", "_doc").addIds("1", "2", "3", "4")).get();
+ searchResponse = client().prepareSearch().setQuery(idsQuery().addIds("1", "2", "3", "4")).get();
  assertHitCount(searchResponse, 3L);
  assertThat(searchResponse.getHits().getHits().length, equalTo(3));
  }
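Besides swapping idsQuery("type1") for the typeless idsQuery(), this file also deletes testTypeFilter outright, since there is no longer a type to filter on. Where documents used to be told apart by mapping type, the usual replacement is an ordinary field; a minimal sketch, in which the "doc_kind" field name is purely hypothetical and not part of this commit:

    // Typeless ids lookup, exactly as the updated test does it.
    SearchResponse byIds = client().prepareSearch("test").setQuery(QueryBuilders.idsQuery().addIds("1", "3")).get();

    // Hypothetical replacement for the old type filter: discriminate on a regular field instead.
    SearchResponse byKind = client().prepareSearch("test").setQuery(QueryBuilders.termQuery("doc_kind", "type1")).get();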
@@ -264,7 +264,7 @@ public class SimpleQueryStringIT extends OpenSearchIntegTestCase {
  assertHitCount(searchResponse, 1L);
  assertSearchHits(searchResponse, "1");

- searchResponse = client().prepareSearch().setTypes("type1").setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get();
+ searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body")).get();
  assertHitCount(searchResponse, 1L);
  assertSearchHits(searchResponse, "1");

@@ -272,7 +272,7 @@ public class SimpleQueryStringIT extends OpenSearchIntegTestCase {
  assertHitCount(searchResponse, 1L);
  assertSearchHits(searchResponse, "1");

- searchResponse = client().prepareSearch().setTypes("type1").setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get();
+ searchResponse = client().prepareSearch().setQuery(simpleQueryStringQuery("foo bar baz").field("body.sub")).get();
  assertHitCount(searchResponse, 1L);
  assertSearchHits(searchResponse, "1");
  }
@@ -543,7 +543,6 @@ public class SearchScrollIT extends OpenSearchIntegTestCase {
  refresh();

  SearchResponse response = client().prepareSearch("test")
- .setTypes("test")
  .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_last"))
  .setScroll("1m")
  .get();
@@ -556,7 +555,6 @@ public class SearchScrollIT extends OpenSearchIntegTestCase {
  assertNoSearchHits(response);

  response = client().prepareSearch("test")
- .setTypes("test")
  .addSort(new FieldSortBuilder("no_field").order(SortOrder.ASC).missing("_first"))
  .setScroll("1m")
  .get();
@@ -1031,7 +1031,7 @@ public class CompletionSuggestSearchIT extends OpenSearchIntegTestCase {

  SearchPhaseExecutionException e = expectThrows(
  SearchPhaseExecutionException.class,
- () -> client().prepareSearch(INDEX).setTypes(TYPE).addSort(new FieldSortBuilder(FIELD)).get()
+ () -> client().prepareSearch(INDEX).addSort(new FieldSortBuilder(FIELD)).get()
  );
  assertThat(e.status().getStatus(), is(400));
  assertThat(e.toString(), containsString("Fielddata is not supported on field [" + FIELD + "] of type [completion]"));
@@ -222,7 +222,6 @@ public class TransportValidateQueryAction extends TransportBroadcastAction<
  String error = null;
  ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(
  request.shardId(),
- request.types(),
  request.nowInMillis(),
  request.filteringAliases()
  );
@@ -43,7 +43,6 @@ import org.opensearch.cluster.ClusterState;
  import org.opensearch.cluster.metadata.IndexNameExpressionResolver;
  import org.opensearch.cluster.routing.ShardIterator;
  import org.opensearch.cluster.service.ClusterService;
- import org.opensearch.common.Strings;
  import org.opensearch.common.inject.Inject;
  import org.opensearch.common.io.stream.Writeable;
  import org.opensearch.common.lease.Releasables;
@@ -51,7 +50,6 @@ import org.opensearch.index.IndexService;
  import org.opensearch.index.engine.Engine;
  import org.opensearch.index.get.GetResult;
  import org.opensearch.index.mapper.IdFieldMapper;
- import org.opensearch.index.mapper.MapperService;
  import org.opensearch.index.mapper.Uid;
  import org.opensearch.index.shard.IndexShard;
  import org.opensearch.index.shard.ShardId;
@@ -136,13 +134,7 @@ public class TransportExplainAction extends TransportSingleShardAction<ExplainRe

  @Override
  protected ExplainResponse shardOperation(ExplainRequest request, ShardId shardId) throws IOException {
- String[] types;
- if (MapperService.SINGLE_MAPPING_NAME.equals(request.type())) { // typeless explain call
- types = Strings.EMPTY_ARRAY;
- } else {
- types = new String[] { request.type() };
- }
- ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shardId, types, request.nowInMillis, request.filteringAlias());
+ ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shardId, request.nowInMillis, request.filteringAlias());
  SearchContext context = searchService.createSearchContext(shardSearchLocalRequest, SearchService.NO_TIMEOUT);
  Engine.GetResult result = null;
  try {
@@ -195,7 +195,7 @@ public class TransportFieldCapabilitiesIndexAction extends HandledTransportActio
  return true;
  }
  assert req.nowInMillis() != 0L;
- ShardSearchRequest searchRequest = new ShardSearchRequest(req.shardId(), null, req.nowInMillis(), AliasFilter.EMPTY);
+ ShardSearchRequest searchRequest = new ShardSearchRequest(req.shardId(), req.nowInMillis(), AliasFilter.EMPTY);
  searchRequest.source(new SearchSourceBuilder().query(req.indexFilter()));
  return searchService.canMatch(searchRequest).canMatch();
  }
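The three transport-action hunks above all converge on the shorter ShardSearchRequest constructor that no longer carries a types array. A minimal sketch of the new call shape, with illustrative variable names (shardId, aliasFilter) standing in for whatever the calling action already has:

    // Shard-level request: (shardId, nowInMillis, aliasFilter) - the types parameter is gone.
    ShardSearchRequest shardRequest = new ShardSearchRequest(shardId, System.currentTimeMillis(), aliasFilter);
    shardRequest.source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()));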
@@ -192,7 +192,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
  CheckedBiConsumer<SearchRequest, XContentParser, IOException> consumer,
  String[] indices,
  IndicesOptions indicesOptions,
- String[] types,
  String routing,
  String searchType,
  Boolean ccsMinimizeRoundtrips,
@@ -225,9 +224,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
  if (indicesOptions != null) {
  searchRequest.indicesOptions(indicesOptions);
  }
- if (types != null && types.length > 0) {
- searchRequest.types(types);
- }
  if (routing != null) {
  searchRequest.routing(routing);
  }
@@ -256,8 +252,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
  throw new IllegalArgumentException("explicit index in multi search is not allowed");
  }
  searchRequest.indices(nodeStringArrayValue(value));
- } else if ("type".equals(entry.getKey()) || "types".equals(entry.getKey())) {
- searchRequest.types(nodeStringArrayValue(value));
  } else if ("search_type".equals(entry.getKey()) || "searchType".equals(entry.getKey())) {
  searchRequest.searchType(nodeStringValue(value, null));
  } else if ("ccs_minimize_roundtrips".equals(entry.getKey()) || "ccsMinimizeRoundtrips".equals(entry.getKey())) {
@@ -359,9 +353,6 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
  xContentBuilder.field("ignore_unavailable", request.indicesOptions().ignoreUnavailable());
  xContentBuilder.field("allow_no_indices", request.indicesOptions().allowNoIndices());
  }
- if (request.types() != null) {
- xContentBuilder.field("types", request.types());
- }
  if (request.searchType() != null) {
  xContentBuilder.field("search_type", request.searchType().name().toLowerCase(Locale.ROOT));
  }
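With the types constructor parameter and the "type"/"types" header keys gone, a multi-search line is described only by its indices and the usual search options. A small sketch of building the request programmatically; the index names are illustrative, and the add(...) call is the standard MultiSearchRequest API rather than anything introduced by this commit:

    // Each msearch item is now just indices + source; there is no per-item type.
    MultiSearchRequest multiSearch = new MultiSearchRequest();
    multiSearch.add(new SearchRequest("logs-2022").source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery())));
    multiSearch.add(new SearchRequest("metrics-2022").source(new SearchSourceBuilder().size(0)));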
@@ -106,8 +106,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla

  private Integer preFilterShardSize;

- private String[] types = Strings.EMPTY_ARRAY;
-
  private boolean ccsMinimizeRoundtrips = true;

  public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.strictExpandOpenAndForbidClosedIgnoreThrottled();
@@ -204,7 +202,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  this.scroll = searchRequest.scroll;
  this.searchType = searchRequest.searchType;
  this.source = searchRequest.source;
- this.types = searchRequest.types;
  this.localClusterAlias = localClusterAlias;
  this.absoluteStartMillis = absoluteStartMillis;
  this.finalReduce = finalReduce;
@@ -225,7 +222,15 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  preference = in.readOptionalString();
  scroll = in.readOptionalWriteable(Scroll::new);
  source = in.readOptionalWriteable(SearchSourceBuilder::new);
- types = in.readStringArray();
+ if (in.getVersion().before(Version.V_2_0_0)) {
+ // types no longer relevant so ignore
+ String[] types = in.readStringArray();
+ if (types.length > 0) {
+ throw new IllegalStateException(
+ "types are no longer supported in search requests but found [" + Arrays.toString(types) + "]"
+ );
+ }
+ }
  indicesOptions = IndicesOptions.readIndicesOptions(in);
  requestCache = in.readOptionalBoolean();
  batchedReduceSize = in.readVInt();
@@ -262,7 +267,10 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  out.writeOptionalString(preference);
  out.writeOptionalWriteable(scroll);
  out.writeOptionalWriteable(source);
- out.writeStringArray(types);
+ if (out.getVersion().before(Version.V_2_0_0)) {
+ // types not supported so send an empty array to previous versions
+ out.writeStringArray(Strings.EMPTY_ARRAY);
+ }
  indicesOptions.writeIndicesOptions(out);
  out.writeOptionalBoolean(requestCache);
  out.writeVInt(batchedReduceSize);
@@ -408,35 +416,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  this.ccsMinimizeRoundtrips = ccsMinimizeRoundtrips;
  }

- /**
- * The document types to execute the search against. Defaults to be executed against
- * all types.
- *
- * @deprecated Types are in the process of being removed. Instead of using a type, prefer to
- * filter on a field on the document.
- */
- @Deprecated
- public String[] types() {
- return types;
- }
-
- /**
- * The document types to execute the search against. Defaults to be executed against
- * all types.
- *
- * @deprecated Types are in the process of being removed. Instead of using a type, prefer to
- * filter on a field on the document.
- */
- @Deprecated
- public SearchRequest types(String... types) {
- Objects.requireNonNull(types, "types must not be null");
- for (String type : types) {
- Objects.requireNonNull(type, "type must not be null");
- }
- this.types = types;
- return this;
- }
-
  /**
  * A comma separated list of routing values to control the shards the search will be executed on.
  */
@@ -702,9 +681,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  sb.append("indices[");
  Strings.arrayToDelimitedString(indices, ",", sb);
  sb.append("], ");
- sb.append("types[");
- Strings.arrayToDelimitedString(types, ",", sb);
- sb.append("], ");
  sb.append("search_type[").append(searchType).append("], ");
  if (scroll != null) {
  sb.append("scroll[").append(scroll.keepAlive()).append("], ");
@@ -733,7 +709,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  && Objects.equals(source, that.source)
  && Objects.equals(requestCache, that.requestCache)
  && Objects.equals(scroll, that.scroll)
- && Arrays.equals(types, that.types)
  && Objects.equals(batchedReduceSize, that.batchedReduceSize)
  && Objects.equals(maxConcurrentShardRequests, that.maxConcurrentShardRequests)
  && Objects.equals(preFilterShardSize, that.preFilterShardSize)
@@ -755,7 +730,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  source,
  requestCache,
  scroll,
- Arrays.hashCode(types),
  indicesOptions,
  batchedReduceSize,
  maxConcurrentShardRequests,
@@ -777,8 +751,6 @@ public class SearchRequest extends ActionRequest implements IndicesRequest.Repla
  + Arrays.toString(indices)
  + ", indicesOptions="
  + indicesOptions
- + ", types="
- + Arrays.toString(types)
  + ", routing='"
  + routing
  + '\''
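The serialization hunks keep the stream format readable by pre-2.0 nodes: on read, a types array written by an older node is drained and rejected if non-empty; on write, an empty array is emitted in its place so the wire slot stays occupied. A condensed sketch of that version-gated pattern, pulled out into hypothetical helper methods (the helper names are not part of the commit):

    // Hypothetical helper: consume the legacy types field an older node still sends.
    static void readLegacyTypes(StreamInput in) throws IOException {
        if (in.getVersion().before(Version.V_2_0_0)) {
            String[] legacyTypes = in.readStringArray();
            if (legacyTypes.length > 0) {
                throw new IllegalStateException("types are no longer supported in search requests but found [" + Arrays.toString(legacyTypes) + "]");
            }
        }
    }

    // Hypothetical helper: keep the wire slot occupied when talking to an older node.
    static void writeLegacyTypes(StreamOutput out) throws IOException {
        if (out.getVersion().before(Version.V_2_0_0)) {
            out.writeStringArray(Strings.EMPTY_ARRAY);
        }
    }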
@@ -72,17 +72,6 @@ public class SearchRequestBuilder extends ActionRequestBuilder<SearchRequest, Se
  return this;
  }

- /**
- * The document types to execute the search against. Defaults to be executed against
- * all types.
- * @deprecated Types are going away, prefer filtering on a field.
- */
- @Deprecated
- public SearchRequestBuilder setTypes(String... types) {
- request.types(types);
- return this;
- }
-
  /**
  * The search type to execute, defaults to {@link SearchType#DEFAULT}.
  */
@@ -624,8 +624,7 @@ public class IndexService extends AbstractIndexComponent implements IndicesClust
  }

  /**
- * Creates a new QueryShardContext. The context has not types set yet, if types are required set them via
- * {@link QueryShardContext#setTypes(String...)}.
+ * Creates a new QueryShardContext.
  *
  * Passing a {@code null} {@link IndexSearcher} will return a valid context, however it won't be able to make
  * {@link IndexReader}-specific optimizations, such as rewriting containing range queries.
@@ -47,7 +47,6 @@ import org.opensearch.search.internal.SearchContext;
  import org.opensearch.tasks.Task;

  import java.nio.charset.Charset;
- import java.util.Arrays;
  import java.util.Collections;
  import java.util.HashMap;
  import java.util.Map;
@@ -220,8 +219,6 @@ public final class SearchSlowLog implements SearchOperationListener {
  } else {
  messageFields.put("total_hits", "-1");
  }
- String[] types = context.getQueryShardContext().getTypes();
- messageFields.put("types", escapeJson(asJsonArray(types != null ? Arrays.stream(types) : Stream.empty())));
  messageFields.put(
  "stats",
  escapeJson(asJsonArray(context.groupStats() != null ? context.groupStats().stream() : Stream.empty()))
@@ -259,13 +256,6 @@ public final class SearchSlowLog implements SearchOperationListener {
  sb.append("-1");
  }
  sb.append("], ");
- if (context.getQueryShardContext().getTypes() == null) {
- sb.append("types[], ");
- } else {
- sb.append("types[");
- Strings.arrayToDelimitedString(context.getQueryShardContext().getTypes(), ",", sb);
- sb.append("], ");
- }
  if (context.groupStats() == null) {
  sb.append("stats[], ");
  } else {
@@ -32,9 +32,8 @@

  package org.opensearch.index.query;

- import org.apache.lucene.search.MatchNoDocsQuery;
  import org.apache.lucene.search.Query;
- import org.opensearch.cluster.metadata.Metadata;
+ import org.opensearch.Version;
  import org.opensearch.common.ParseField;
  import org.opensearch.common.ParsingException;
  import org.opensearch.common.Strings;
@@ -44,14 +43,12 @@ import org.opensearch.common.logging.DeprecationLogger;
  import org.opensearch.common.xcontent.ObjectParser;
  import org.opensearch.common.xcontent.XContentBuilder;
  import org.opensearch.common.xcontent.XContentParser;
- import org.opensearch.index.mapper.DocumentMapper;
  import org.opensearch.index.mapper.IdFieldMapper;
  import org.opensearch.index.mapper.MappedFieldType;

  import java.io.IOException;
  import java.util.ArrayList;
  import java.util.Arrays;
- import java.util.Collection;
  import java.util.Collections;
  import java.util.HashSet;
  import java.util.Objects;
@@ -72,8 +69,6 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {

  private final Set<String> ids = new HashSet<>();

- private String[] types = Strings.EMPTY_ARRAY;
-
  /**
  * Creates a new IdsQueryBuilder with no types specified upfront
  */
@@ -86,38 +81,23 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
  */
  public IdsQueryBuilder(StreamInput in) throws IOException {
  super(in);
- types = in.readStringArray();
+ if (in.getVersion().before(Version.V_2_0_0)) {
+ // types no longer relevant so ignore
+ String[] types = in.readStringArray();
+ if (types.length > 0) {
+ throw new IllegalStateException("types are no longer supported in ids query but found [" + Arrays.toString(types) + "]");
+ }
+ }
  Collections.addAll(ids, in.readStringArray());
  }

  @Override
  protected void doWriteTo(StreamOutput out) throws IOException {
- out.writeStringArray(types);
- out.writeStringArray(ids.toArray(new String[ids.size()]));
- }
-
- /**
- * Add types to query
- *
- * @deprecated Types are in the process of being removed, prefer to filter on a field instead.
- */
- @Deprecated
- public IdsQueryBuilder types(String... types) {
- if (types == null) {
- throw new IllegalArgumentException("[" + NAME + "] types cannot be null");
- }
- this.types = types;
- return this;
- }
-
- /**
- * Returns the types used in this query
- *
- * @deprecated Types are in the process of being removed, prefer to filter on a field instead.
- */
- @Deprecated
- public String[] types() {
- return this.types;
- }
+ if (out.getVersion().before(Version.V_2_0_0)) {
+ // types not supported so send an empty array to previous versions
+ out.writeStringArray(Strings.EMPTY_ARRAY);
+ }
+ out.writeStringArray(ids.toArray(new String[ids.size()]));
+ }

  /**
@@ -141,9 +121,6 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
  @Override
  protected void doXContent(XContentBuilder builder, Params params) throws IOException {
  builder.startObject(NAME);
- if (types.length > 0) {
- builder.array(TYPE_FIELD.getPreferredName(), types);
- }
  builder.startArray(VALUES_FIELD.getPreferredName());
  for (String value : ids) {
  builder.value(value);
@@ -156,18 +133,13 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {
  private static final ObjectParser<IdsQueryBuilder, Void> PARSER = new ObjectParser<>(NAME, IdsQueryBuilder::new);

  static {
- PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::types), IdsQueryBuilder.TYPE_FIELD);
  PARSER.declareStringArray(fromList(String.class, IdsQueryBuilder::addIds), IdsQueryBuilder.VALUES_FIELD);
  declareStandardFields(PARSER);
  }

  public static IdsQueryBuilder fromXContent(XContentParser parser) {
  try {
- IdsQueryBuilder builder = PARSER.apply(parser, null);
- if (builder.types().length > 0) {
- deprecationLogger.deprecate("ids_query_with_types", TYPES_DEPRECATION_MESSAGE);
- }
- return builder;
+ return PARSER.apply(parser, null);
  } catch (IllegalArgumentException e) {
  throw new ParsingException(parser.getTokenLocation(), e.getMessage(), e);
  }
@@ -193,34 +165,20 @@ public class IdsQueryBuilder extends AbstractQueryBuilder<IdsQueryBuilder> {

  @Override
  protected Query doToQuery(QueryShardContext context) throws IOException {
- MappedFieldType idField = context.fieldMapper(IdFieldMapper.NAME);
+ MappedFieldType idField = context.getFieldType(IdFieldMapper.NAME);
  if (idField == null || ids.isEmpty()) {
  throw new IllegalStateException("Rewrite first");
  }
- final DocumentMapper mapper = context.getMapperService().documentMapper();
- Collection<String> typesForQuery;
- if (types.length == 0) {
- typesForQuery = context.queryTypes();
- } else if (types.length == 1 && Metadata.ALL.equals(types[0])) {
- typesForQuery = Collections.singleton(mapper.type());
- } else {
- typesForQuery = new HashSet<>(Arrays.asList(types));
- }
-
- if (typesForQuery.contains(mapper.type())) {
- return idField.termsQuery(new ArrayList<>(ids), context);
- } else {
- return new MatchNoDocsQuery("Type mismatch");
- }
+ return idField.termsQuery(new ArrayList<>(ids), context);
  }

  @Override
  protected int doHashCode() {
- return Objects.hash(ids, Arrays.hashCode(types));
+ return Objects.hash(ids);
  }

  @Override
  protected boolean doEquals(IdsQueryBuilder other) {
- return Objects.equals(ids, other.ids) && Arrays.equals(types, other.types);
+ return Objects.equals(ids, other.ids);
  }
  }
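Stripped of its type handling, doToQuery above reduces to a single terms query on the _id field; the old MatchNoDocsQuery branch for a type mismatch has no equivalent any more. A compact sketch of what the builder now boils down to internally, restated from the hunk rather than adding anything new:

    // Inside doToQuery (sketch): resolve the _id field and turn the collected ids into a terms query.
    MappedFieldType idField = context.getFieldType(IdFieldMapper.NAME);
    Query query = idField.termsQuery(new ArrayList<>(ids), context);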
@@ -153,18 +153,6 @@ public final class QueryBuilders {
  return new IdsQueryBuilder();
  }

- /**
- * Constructs a query that will match only specific ids within types.
- *
- * @param types The mapping/doc type
- *
- * @deprecated Types are in the process of being removed, use {@link #idsQuery()} instead.
- */
- @Deprecated
- public static IdsQueryBuilder idsQuery(String... types) {
- return new IdsQueryBuilder().types(types);
- }
-
  /**
  * A Query that matches documents containing a term.
  *
@@ -622,15 +610,6 @@ public final class QueryBuilders {
  return new WrapperQueryBuilder(source);
  }

- /**
- * A filter based on doc/mapping type.
- * @deprecated Types are going away, prefer filtering on a field.
- */
- @Deprecated
- public static TypeQueryBuilder typeQuery(String type) {
- return new TypeQueryBuilder(type);
- }
-
  /**
  * A terms query that can extract the terms from another doc in an index.
  */
@ -44,7 +44,6 @@ import org.opensearch.action.ActionListener;
|
|||||||
import org.opensearch.client.Client;
|
import org.opensearch.client.Client;
|
||||||
import org.opensearch.common.CheckedFunction;
|
import org.opensearch.common.CheckedFunction;
 import org.opensearch.common.ParsingException;
-import org.opensearch.common.Strings;
 import org.opensearch.common.TriFunction;
 import org.opensearch.common.io.stream.NamedWriteableRegistry;
 import org.opensearch.common.lucene.search.Queries;
@@ -76,9 +75,6 @@ import org.opensearch.search.lookup.SearchLookup;
 import org.opensearch.transport.RemoteClusterAware;

 import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -105,7 +101,6 @@ public class QueryShardContext extends QueryRewriteContext {
 private final TriFunction<MappedFieldType, String, Supplier<SearchLookup>, IndexFieldData<?>> indexFieldDataService;
 private final int shardId;
 private final IndexSearcher searcher;
-private String[] types = Strings.EMPTY_ARRAY;
 private boolean cacheable = true;
 private final SetOnce<Boolean> frozen = new SetOnce<>();

@@ -113,14 +108,6 @@ public class QueryShardContext extends QueryRewriteContext {
 private final Predicate<String> indexNameMatcher;
 private final BooleanSupplier allowExpensiveQueries;

-public void setTypes(String... types) {
-this.types = types;
-}
-
-public String[] getTypes() {
-return types;
-}
-
 private final Map<String, Query> namedQueries = new HashMap<>();
 private boolean allowUnmappedFields;
 private boolean mapUnmappedFieldAsString;
@@ -377,18 +364,6 @@ public class QueryShardContext extends QueryRewriteContext {
 }
 }

-/**
-* Returns the narrowed down explicit types, or, if not set, all types.
-*/
-public Collection<String> queryTypes() {
-String[] types = getTypes();
-if (types == null || types.length == 0 || (types.length == 1 && types[0].equals("_all"))) {
-DocumentMapper mapper = getMapperService().documentMapper();
-return mapper == null ? Collections.emptyList() : Collections.singleton(mapper.type());
-}
-return Arrays.asList(types);
-}
-
 private SearchLookup lookup = null;

 /**
@@ -398,8 +373,7 @@ public class QueryShardContext extends QueryRewriteContext {
 if (this.lookup == null) {
 this.lookup = new SearchLookup(
 getMapperService(),
-(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup),
-types
+(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup)
 );
 }
 return this.lookup;
@@ -415,8 +389,7 @@ public class QueryShardContext extends QueryRewriteContext {
 */
 return new SearchLookup(
 getMapperService(),
-(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup),
-types
+(fieldType, searchLookup) -> indexFieldDataService.apply(fieldType, fullyQualifiedIndex.getName(), searchLookup)
 );
 }

@@ -503,9 +503,6 @@ public abstract class AbstractBulkByScrollRequest<Self extends AbstractBulkByScr
 } else {
 b.append("[all indices]");
 }
-if (searchRequest.types() != null && searchRequest.types().length != 0) {
-b.append(Arrays.toString(searchRequest.types()));
-}
 }

 @Override
@@ -91,9 +91,8 @@ public class ClientScrollableHitSource extends ScrollableHitSource {
 public void doStart(RejectAwareActionListener<Response> searchListener) {
 if (logger.isDebugEnabled()) {
 logger.debug(
-"executing initial scroll against {}{}",
-isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices(),
-isEmpty(firstSearchRequest.types()) ? "" : firstSearchRequest.types()
+"executing initial scroll against {}",
+isEmpty(firstSearchRequest.indices()) ? "all indices" : firstSearchRequest.indices()
 );
 }
 client.search(firstSearchRequest, wrapListener(searchListener));
@@ -102,19 +102,6 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest<DeleteByQu
 return this;
 }

-/**
-* Set the document types for the delete
-* @deprecated Types are in the process of being removed. Instead of
-* using a type, prefer to filter on a field of the document.
-*/
-@Deprecated
-public DeleteByQueryRequest setDocTypes(String... types) {
-if (types != null) {
-getSearchRequest().types(types);
-}
-return this;
-}
-
 /**
 * Set routing limiting the process to the shards that match that routing value
 */
@@ -155,21 +142,6 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest<DeleteByQu
 return getSearchRequest().routing();
 }

-/**
-* Gets the document types on which this request would be executed. Returns an empty array if all
-* types are to be processed.
-* @deprecated Types are in the process of being removed. Instead of
-* using a type, prefer to filter on a field of the document.
-*/
-@Deprecated
-public String[] getDocTypes() {
-if (getSearchRequest().types() != null) {
-return getSearchRequest().types();
-} else {
-return new String[0];
-}
-}
-
 @Override
 protected DeleteByQueryRequest self() {
 return this;
@@ -223,29 +195,6 @@ public class DeleteByQueryRequest extends AbstractBulkByScrollRequest<DeleteByQu
 return getSearchRequest().indicesOptions();
 }

-/**
-* Gets the document types on which this request would be executed.
-* @deprecated Types are in the process of being removed. Instead of
-* using a type, prefer to filter on a field of the document.
-*/
-@Deprecated
-public String[] types() {
-assert getSearchRequest() != null;
-return getSearchRequest().types();
-}
-
-/**
-* Set the document types for the delete
-* @deprecated Types are in the process of being removed. Instead of
-* using a type, prefer to filter on a field of the document.
-*/
-@Deprecated
-public DeleteByQueryRequest types(String... types) {
-assert getSearchRequest() != null;
-getSearchRequest().types(types);
-return this;
-}
-
 @Override
 public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
 builder.startObject();
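The deprecation notes in the removed methods above point callers at field filters instead of types. As an illustration only, not taken from this commit, a delete-by-query can scope itself with a query on an ordinary field; the index and field names below are hypothetical:

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.reindex.DeleteByQueryRequest;

class DeleteByQueryWithoutTypes {
    // Hypothetical names: delete only documents whose "doc_type" field is "tweet",
    // rather than relying on the removed _type metadata.
    static DeleteByQueryRequest build() {
        DeleteByQueryRequest request = new DeleteByQueryRequest("my-index");
        request.setQuery(QueryBuilders.termQuery("doc_type", "tweet"));
        return request;
    }
}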
@@ -167,16 +167,6 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
 return this;
 }

-/**
-* Set the document types which need to be copied from the source indices
-*/
-public ReindexRequest setSourceDocTypes(String... docTypes) {
-if (docTypes != null) {
-this.getSearchRequest().types(docTypes);
-}
-return this;
-}
-
 /**
 * Sets the scroll size for setting how many documents are to be processed in one batch during reindex
 */
@@ -330,10 +320,6 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
 builder.rawField("query", remoteInfo.getQuery().streamInput(), RemoteInfo.QUERY_CONTENT_TYPE.type());
 }
 builder.array("index", getSearchRequest().indices());
-String[] types = getSearchRequest().types();
-if (types.length > 0) {
-builder.array("type", types);
-}
 getSearchRequest().source().innerToXContent(builder, params);
 builder.endObject();
 }
@@ -383,11 +369,6 @@ public class ReindexRequest extends AbstractBulkIndexByScrollRequest<ReindexRequ
 if (indices != null) {
 request.getSearchRequest().indices(indices);
 }
-String[] types = extractStringArray(source, "type");
-if (types != null) {
-deprecationLogger.deprecate("reindex_with_types", TYPES_DEPRECATION_MESSAGE);
-request.getSearchRequest().types(types);
-}
 request.setRemoteInfo(buildRemoteInfo(source));
 XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
 builder.map(source);
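With setSourceDocTypes gone, a reindex that only wants a subset of documents narrows the source with a query instead. A hedged sketch, not from this diff, using hypothetical index and field names:

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.reindex.ReindexRequest;

class ReindexWithoutTypes {
    // Hypothetical names: a source query replaces the old per-type selection.
    static ReindexRequest build() {
        ReindexRequest request = new ReindexRequest();
        request.setSourceIndices("old-index");
        request.setDestIndex("new-index");
        request.setSourceQuery(QueryBuilders.termQuery("doc_type", "tweet"));
        return request;
    }
}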
@@ -98,19 +98,6 @@ public class UpdateByQueryRequest extends AbstractBulkIndexByScrollRequest<Updat
 return this;
 }

-/**
-* Set the document types for the update
-* @deprecated Types are in the process of being removed. Instead of
-* using a type, prefer to filter on a field of the document.
-*/
-@Deprecated
-public UpdateByQueryRequest setDocTypes(String... types) {
-if (types != null) {
-getSearchRequest().types(types);
-}
-return this;
-}
-
 /**
 * Set routing limiting the process to the shards that match that routing value
 */
@@ -151,21 +138,6 @@ public class UpdateByQueryRequest extends AbstractBulkIndexByScrollRequest<Updat
 return getSearchRequest().routing();
 }

-/**
-* Gets the document types on which this request would be executed. Returns an empty array if all
-* types are to be processed.
-* @deprecated Types are in the process of being removed. Instead of
-* using a type, prefer to filter on a field of the document.
-*/
-@Deprecated
-public String[] getDocTypes() {
-if (getSearchRequest().types() != null) {
-return getSearchRequest().types();
-} else {
-return new String[0];
-}
-}
-
 /**
 * Ingest pipeline to set on index requests made by this action.
 */
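Update-by-query follows the same pattern as delete-by-query above: scope the request with a query on a field rather than a type. Another illustrative sketch with hypothetical names, not taken from this commit:

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.reindex.UpdateByQueryRequest;
import org.opensearch.script.Script;

class UpdateByQueryWithoutTypes {
    // Hypothetical index, field, and script; the query replaces the removed setDocTypes(...).
    static UpdateByQueryRequest build() {
        UpdateByQueryRequest request = new UpdateByQueryRequest("my-index");
        request.setQuery(QueryBuilders.termQuery("doc_type", "tweet"));
        request.setScript(new Script("ctx._source.migrated = true"));
        return request;
    }
}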
@@ -37,7 +37,6 @@ import org.opensearch.action.search.SearchResponse;
 import org.opensearch.action.support.IndicesOptions;
 import org.opensearch.client.node.NodeClient;
 import org.opensearch.common.Strings;
-import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.common.xcontent.XContentBuilder;
 import org.opensearch.index.query.QueryBuilder;
 import org.opensearch.rest.BaseRestHandler;
@@ -59,8 +58,6 @@ import static org.opensearch.rest.action.RestActions.buildBroadcastShardsHeader;
 import static org.opensearch.search.internal.SearchContext.DEFAULT_TERMINATE_AFTER;

 public class RestCountAction extends BaseRestHandler {
-private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestCountAction.class);
-static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in count requests is deprecated.";

 @Override
 public List<Route> routes() {
@@ -104,11 +101,6 @@ public class RestCountAction extends BaseRestHandler {
 searchSourceBuilder.minScore(minScore);
 }

-if (request.hasParam("type")) {
-deprecationLogger.deprecate("count_with_types", TYPES_DEPRECATION_MESSAGE);
-countRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
-}
-
 countRequest.preference(request.param("preference"));

 final int terminateAfter = request.paramAsInt("terminate_after", DEFAULT_TERMINATE_AFTER);
@@ -68,7 +68,6 @@ import static org.opensearch.rest.RestRequest.Method.POST;

 public class RestMultiSearchAction extends BaseRestHandler {
 private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(RestMultiSearchAction.class);
-static final String TYPES_DEPRECATION_MESSAGE = "[types removal]" + " Specifying types in multi search requests is deprecated.";

 private static final Set<String> RESPONSE_PARAMS;

@@ -108,13 +107,6 @@ public class RestMultiSearchAction extends BaseRestHandler {
 @Override
 public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException {
 final MultiSearchRequest multiSearchRequest = parseRequest(request, client.getNamedWriteableRegistry(), allowExplicitIndex);
-// Emit a single deprecation message if any search request contains types.
-for (SearchRequest searchRequest : multiSearchRequest.requests()) {
-if (searchRequest.types().length > 0) {
-deprecationLogger.deprecate("msearch_with_types", TYPES_DEPRECATION_MESSAGE);
-break;
-}
-}
 return channel -> {
 final RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel());
 cancellableClient.execute(MultiSearchAction.INSTANCE, multiSearchRequest, new RestToXContentListener<>(channel));
@@ -192,7 +184,6 @@ public class RestMultiSearchAction extends BaseRestHandler {
 ) throws IOException {

 String[] indices = Strings.splitStringByCommaToArray(request.param("index"));
-String[] types = Strings.splitStringByCommaToArray(request.param("type"));
 String searchType = request.param("search_type");
 boolean ccsMinimizeRoundtrips = request.paramAsBoolean("ccs_minimize_roundtrips", true);
 String routing = request.param("routing");
@@ -206,7 +197,6 @@ public class RestMultiSearchAction extends BaseRestHandler {
 consumer,
 indices,
 indicesOptions,
-types,
 routing,
 searchType,
 ccsMinimizeRoundtrips,
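Multi-search sub-requests are now addressed purely by index. For illustration only, with hypothetical index names not taken from this diff, the same request can be built programmatically without any type:

import org.opensearch.action.search.MultiSearchRequest;
import org.opensearch.action.search.SearchRequest;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

class MultiSearchWithoutTypes {
    // Hypothetical indices; each sub-request carries indices and a source, never a type.
    static MultiSearchRequest build() {
        MultiSearchRequest multiSearch = new MultiSearchRequest();
        multiSearch.add(new SearchRequest("logs-2022").source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery())));
        multiSearch.add(new SearchRequest("metrics-2022").source(new SearchSourceBuilder().query(QueryBuilders.termQuery("level", "error"))));
        return multiSearch;
    }
}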
@@ -199,10 +199,6 @@ public class RestSearchAction extends BaseRestHandler {
 searchRequest.scroll(new Scroll(parseTimeValue(scroll, null, "scroll")));
 }

-if (request.hasParam("type")) {
-deprecationLogger.deprecate("search_with_types", TYPES_DEPRECATION_MESSAGE);
-searchRequest.types(Strings.splitStringByCommaToArray(request.param("type")));
-}
 searchRequest.routing(request.param("routing"));
 searchRequest.preference(request.param("preference"));
 searchRequest.indicesOptions(IndicesOptions.fromRequest(request, searchRequest.indicesOptions()));
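On the client side the equivalent change is to stop calling SearchRequest.types(...) and, where a type-like distinction is still needed, query a regular field. A minimal sketch with hypothetical names, for illustration only:

import org.opensearch.action.search.SearchRequest;
import org.opensearch.index.query.QueryBuilders;
import org.opensearch.search.builder.SearchSourceBuilder;

class SearchWithoutTypes {
    // Hypothetical index and field; the former type is modelled as an ordinary keyword field.
    static SearchRequest build() {
        SearchRequest request = new SearchRequest("my-index");
        request.source(new SearchSourceBuilder().query(QueryBuilders.termQuery("doc_type", "product")));
        return request;
    }
}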
@@ -55,7 +55,6 @@ import org.opensearch.index.engine.Engine;
 import org.opensearch.index.mapper.MappedFieldType;
 import org.opensearch.index.mapper.MapperService;
 import org.opensearch.index.mapper.ObjectMapper;
-import org.opensearch.index.mapper.TypeFieldMapper;
 import org.opensearch.index.query.AbstractQueryBuilder;
 import org.opensearch.index.query.ParsedQuery;
 import org.opensearch.index.query.QueryBuilder;
@@ -209,7 +208,6 @@ final class DefaultSearchContext extends SearchContext {
 request::nowInMillis,
 shardTarget.getClusterAlias()
 );
-queryShardContext.setTypes(request.types());
 queryBoost = request.indexBoost();
 this.lowLevelCancellation = lowLevelCancellation;
 }
@@ -321,11 +319,6 @@ final class DefaultSearchContext extends SearchContext {
 @Override
 public Query buildFilteredQuery(Query query) {
 List<Query> filters = new ArrayList<>();
-Query typeFilter = createTypeFilter(queryShardContext.getTypes());
-if (typeFilter != null) {
-filters.add(typeFilter);
-}
-
 if (mapperService().hasNested()
 && new NestedHelper(mapperService()).mightMatchNestedDocs(query)
 && (aliasFilter == null || new NestedHelper(mapperService()).mightMatchNestedDocs(aliasFilter))) {
@@ -357,17 +350,6 @@ final class DefaultSearchContext extends SearchContext {
 }
 }

-private Query createTypeFilter(String[] types) {
-if (types != null && types.length >= 1) {
-if (mapperService().documentMapper() == null) {
-return null;
-}
-TypeFieldMapper.TypeFieldType ft = new TypeFieldMapper.TypeFieldType(mapperService().documentMapper().type());
-return ft.typeFilter(types);
-}
-return null;
-}
-
 @Override
 public ShardSearchContextId id() {
 return readerContext.id();
@@ -71,6 +71,7 @@ import org.opensearch.tasks.TaskId;
 import org.opensearch.transport.TransportRequest;

 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Map;
 import java.util.function.Function;

@@ -87,7 +88,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 private final int numberOfShards;
 private final SearchType searchType;
 private final Scroll scroll;
-private final String[] types;
 private final float indexBoost;
 private final Boolean requestCache;
 private final long nowInMillis;
@@ -152,7 +152,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 numberOfShards,
 searchRequest.searchType(),
 searchRequest.source(),
-searchRequest.types(),
 searchRequest.requestCache(),
 aliasFilter,
 indexBoost,
@@ -170,14 +169,13 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 assert searchRequest.allowPartialSearchResults() != null;
 }

-public ShardSearchRequest(ShardId shardId, String[] types, long nowInMillis, AliasFilter aliasFilter) {
+public ShardSearchRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFilter) {
 this(
 OriginalIndices.NONE,
 shardId,
 -1,
 SearchType.QUERY_THEN_FETCH,
 null,
-types,
 null,
 aliasFilter,
 1.0f,
@@ -198,7 +196,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 int numberOfShards,
 SearchType searchType,
 SearchSourceBuilder source,
-String[] types,
 Boolean requestCache,
 AliasFilter aliasFilter,
 float indexBoost,
@@ -215,7 +212,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 this.numberOfShards = numberOfShards;
 this.searchType = searchType;
 this.source = source;
-this.types = types;
 this.requestCache = requestCache;
 this.aliasFilter = aliasFilter;
 this.indexBoost = indexBoost;
@@ -240,7 +236,13 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 numberOfShards = in.readVInt();
 scroll = in.readOptionalWriteable(Scroll::new);
 source = in.readOptionalWriteable(SearchSourceBuilder::new);
-types = in.readStringArray();
+if (in.getVersion().before(Version.V_2_0_0)) {
+// types no longer relevant so ignore
+String[] types = in.readStringArray();
+if (types.length > 0) {
+throw new IllegalStateException("types are no longer supported in ids query but found [" + Arrays.toString(types) + "]");
+}
+}
 aliasFilter = new AliasFilter(in);
 indexBoost = in.readFloat();
 nowInMillis = in.readVLong();
@@ -281,7 +283,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 this.numberOfShards = clone.numberOfShards;
 this.scroll = clone.scroll;
 this.source = clone.source;
-this.types = clone.types;
 this.aliasFilter = clone.aliasFilter;
 this.indexBoost = clone.indexBoost;
 this.nowInMillis = clone.nowInMillis;
@@ -314,7 +315,10 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 }
 out.writeOptionalWriteable(scroll);
 out.writeOptionalWriteable(source);
-out.writeStringArray(types);
+if (out.getVersion().before(Version.V_2_0_0)) {
+// types not supported so send an empty array to previous versions
+out.writeStringArray(Strings.EMPTY_ARRAY);
+}
 aliasFilter.writeTo(out);
 out.writeFloat(indexBoost);
 if (asKey == false) {
@@ -363,10 +367,6 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque
 return shardId;
 }

-public String[] types() {
-return types;
-}
-
 public SearchSourceBuilder source() {
 return source;
 }
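The read and write hunks above keep the wire format compatible with pre-2.0 nodes by still exchanging an (empty) string array when talking to older versions. A stripped-down sketch of that version-gating pattern, using a hypothetical class and field purely for illustration:

import java.io.IOException;
import org.opensearch.Version;
import org.opensearch.common.Strings;
import org.opensearch.common.io.stream.StreamInput;
import org.opensearch.common.io.stream.StreamOutput;
import org.opensearch.common.io.stream.Writeable;

class VersionGatedRequest implements Writeable {
    private final String field; // hypothetical surviving field

    VersionGatedRequest(StreamInput in) throws IOException {
        if (in.getVersion().before(Version.V_2_0_0)) {
            in.readStringArray(); // drained but ignored: older nodes still send the removed array
        }
        this.field = in.readString();
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        if (out.getVersion().before(Version.V_2_0_0)) {
            out.writeStringArray(Strings.EMPTY_ARRAY); // placate older readers
        }
        out.writeString(field);
    }
}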
@@ -32,7 +32,6 @@
 package org.opensearch.search.lookup;

 import org.apache.lucene.index.LeafReaderContext;
-import org.opensearch.common.Nullable;
 import org.opensearch.index.fielddata.IndexFieldData;
 import org.opensearch.index.mapper.MappedFieldType;
 import org.opensearch.index.mapper.MapperService;
@@ -43,13 +42,10 @@ public class DocLookup {

 private final MapperService mapperService;
 private final Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup;
-@Nullable
-private final String[] types;

-DocLookup(MapperService mapperService, Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup, @Nullable String[] types) {
+DocLookup(MapperService mapperService, Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup) {
 this.mapperService = mapperService;
 this.fieldDataLookup = fieldDataLookup;
-this.types = types;
 }

 public MapperService mapperService() {
@@ -61,10 +57,6 @@ public class DocLookup {
 }

 public LeafDocLookup getLeafDocLookup(LeafReaderContext context) {
-return new LeafDocLookup(mapperService, fieldDataLookup, types, context);
-}
-
-public String[] getTypes() {
-return types;
+return new LeafDocLookup(mapperService, fieldDataLookup, context);
 }
 }
@@ -32,22 +32,18 @@
 package org.opensearch.search.lookup;

 import org.apache.lucene.index.LeafReaderContext;
-import org.opensearch.common.Nullable;
 import org.opensearch.index.mapper.MapperService;

 public class FieldsLookup {

 private final MapperService mapperService;
-@Nullable
-private final String[] types;

-FieldsLookup(MapperService mapperService, @Nullable String[] types) {
+FieldsLookup(MapperService mapperService) {
 this.mapperService = mapperService;
-this.types = types;
 }

 public LeafFieldsLookup getLeafFieldsLookup(LeafReaderContext context) {
-return new LeafFieldsLookup(mapperService, types, context.reader());
+return new LeafFieldsLookup(mapperService, context.reader());
 }

 }
@@ -33,7 +33,6 @@ package org.opensearch.search.lookup;

 import org.apache.lucene.index.LeafReaderContext;
 import org.opensearch.ExceptionsHelper;
-import org.opensearch.common.Nullable;
 import org.opensearch.common.logging.DeprecationLogger;
 import org.opensearch.index.fielddata.IndexFieldData;
 import org.opensearch.index.fielddata.ScriptDocValues;
@@ -43,7 +42,6 @@ import org.opensearch.index.mapper.MapperService;
 import java.io.IOException;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Map;
@@ -60,23 +58,13 @@ public class LeafDocLookup implements Map<String, ScriptDocValues<?>> {

 private final MapperService mapperService;
 private final Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup;

-@Nullable
-private final String[] types;
-
 private final LeafReaderContext reader;

 private int docId = -1;

-LeafDocLookup(
-MapperService mapperService,
-Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup,
-@Nullable String[] types,
-LeafReaderContext reader
-) {
+LeafDocLookup(MapperService mapperService, Function<MappedFieldType, IndexFieldData<?>> fieldDataLookup, LeafReaderContext reader) {
 this.mapperService = mapperService;
 this.fieldDataLookup = fieldDataLookup;
-this.types = types;
 this.reader = reader;
 }

@@ -100,9 +88,7 @@ public class LeafDocLookup implements Map<String, ScriptDocValues<?>> {
 if (scriptValues == null) {
 final MappedFieldType fieldType = mapperService.fieldType(fieldName);
 if (fieldType == null) {
-throw new IllegalArgumentException(
-"No field found for [" + fieldName + "] in mapping with types " + Arrays.toString(types)
-);
+throw new IllegalArgumentException("No field found for [" + fieldName + "] in mapping");
 }
 // load fielddata on behalf of the script: otherwise it would need additional permissions
 // to deal with pagedbytes/ramusagestimator/etc
@@ -33,7 +33,6 @@ package org.opensearch.search.lookup;

 import org.apache.lucene.index.LeafReader;
 import org.opensearch.OpenSearchParseException;
-import org.opensearch.common.Nullable;
 import org.opensearch.index.fieldvisitor.SingleFieldsVisitor;
 import org.opensearch.index.mapper.DocumentMapper;
 import org.opensearch.index.mapper.MappedFieldType;
@@ -42,7 +41,6 @@ import org.opensearch.index.mapper.TypeFieldMapper;

 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
@@ -54,19 +52,14 @@ import static java.util.Collections.singletonMap;
 public class LeafFieldsLookup implements Map {

 private final MapperService mapperService;

-@Nullable
-private final String[] types;
-
 private final LeafReader reader;

 private int docId = -1;

 private final Map<String, FieldLookup> cachedFieldData = new HashMap<>();

-LeafFieldsLookup(MapperService mapperService, @Nullable String[] types, LeafReader reader) {
+LeafFieldsLookup(MapperService mapperService, LeafReader reader) {
 this.mapperService = mapperService;
-this.types = types;
 this.reader = reader;
 }

@@ -148,7 +141,7 @@ public class LeafFieldsLookup implements Map {
 if (data == null) {
 MappedFieldType fieldType = mapperService.fieldType(name);
 if (fieldType == null) {
-throw new IllegalArgumentException("No field found for [" + name + "] in mapping with types " + Arrays.toString(types));
+throw new IllegalArgumentException("No field found for [" + name + "] in mapping");
 }
 data = new FieldLookup(fieldType);
 cachedFieldData.put(name, data);
@@ -33,7 +33,6 @@
 package org.opensearch.search.lookup;

 import org.apache.lucene.index.LeafReaderContext;
-import org.opensearch.common.Nullable;
 import org.opensearch.index.fielddata.IndexFieldData;
 import org.opensearch.index.mapper.MappedFieldType;
 import org.opensearch.index.mapper.MapperService;
@@ -75,17 +74,15 @@ public class SearchLookup {
 */
 public SearchLookup(
 MapperService mapperService,
-BiFunction<MappedFieldType, Supplier<SearchLookup>, IndexFieldData<?>> fieldDataLookup,
-@Nullable String[] types
+BiFunction<MappedFieldType, Supplier<SearchLookup>, IndexFieldData<?>> fieldDataLookup
 ) {
 this.fieldChain = Collections.emptySet();
 docMap = new DocLookup(
 mapperService,
-fieldType -> fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())),
-types
+fieldType -> fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name()))
 );
 sourceLookup = new SourceLookup();
-fieldsLookup = new FieldsLookup(mapperService, types);
+fieldsLookup = new FieldsLookup(mapperService);
 this.fieldDataLookup = fieldDataLookup;
 }

@@ -100,8 +97,7 @@ public class SearchLookup {
 this.fieldChain = Collections.unmodifiableSet(fieldChain);
 this.docMap = new DocLookup(
 searchLookup.docMap.mapperService(),
-fieldType -> searchLookup.fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name())),
-searchLookup.docMap.getTypes()
+fieldType -> searchLookup.fieldDataLookup.apply(fieldType, () -> forkAndTrackFieldReferences(fieldType.name()))
 );
 this.sourceLookup = searchLookup.sourceLookup;
 this.fieldsLookup = searchLookup.fieldsLookup;
@@ -109,7 +109,6 @@ public class ExpandSearchPhaseTests extends OpenSearchTestCase {
 assertThat(groupBuilder.must(), Matchers.contains(QueryBuilders.termQuery("foo", "bar")));
 }
 assertArrayEquals(mockSearchPhaseContext.getRequest().indices(), searchRequest.indices());
-assertArrayEquals(mockSearchPhaseContext.getRequest().types(), searchRequest.types());

 List<MultiSearchResponse.Item> mSearchResponses = new ArrayList<>(numInnerHits);
 for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
@@ -88,13 +88,11 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
 request.requests().get(0).indicesOptions(),
 equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
 );
-assertThat(request.requests().get(0).types().length, equalTo(0));
 assertThat(request.requests().get(1).indices()[0], equalTo("test"));
 assertThat(
 request.requests().get(1).indicesOptions(),
 equalTo(IndicesOptions.fromOptions(false, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
 );
-assertThat(request.requests().get(1).types()[0], equalTo("type1"));
 assertThat(request.requests().get(2).indices()[0], equalTo("test"));
 assertThat(
 request.requests().get(2).indicesOptions(),
@@ -112,12 +110,9 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
 );

 assertThat(request.requests().get(5).indices(), is(Strings.EMPTY_ARRAY));
-assertThat(request.requests().get(5).types().length, equalTo(0));
 assertThat(request.requests().get(6).indices(), is(Strings.EMPTY_ARRAY));
-assertThat(request.requests().get(6).types().length, equalTo(0));
 assertThat(request.requests().get(6).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH));
 assertThat(request.requests().get(7).indices(), is(Strings.EMPTY_ARRAY));
-assertThat(request.requests().get(7).types().length, equalTo(0));
 }

 public void testFailWithUnknownKey() {
@@ -148,7 +143,6 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
 request.requests().get(0).indicesOptions(),
 equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
 );
-assertThat(request.requests().get(0).types().length, equalTo(0));
 }

 public void testCancelAfterIntervalAtParentAndFewChildRequest() throws Exception {
@@ -197,23 +191,17 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
 request.requests().get(0).indicesOptions(),
 equalTo(IndicesOptions.fromOptions(true, true, true, true, SearchRequest.DEFAULT_INDICES_OPTIONS))
 );
-assertThat(request.requests().get(0).types().length, equalTo(0));
 }

 public void testSimpleAdd2() throws Exception {
 MultiSearchRequest request = parseMultiSearchRequestFromFile("/org/opensearch/action/search/simple-msearch2.json");
 assertThat(request.requests().size(), equalTo(5));
 assertThat(request.requests().get(0).indices()[0], equalTo("test"));
-assertThat(request.requests().get(0).types().length, equalTo(0));
 assertThat(request.requests().get(1).indices()[0], equalTo("test"));
-assertThat(request.requests().get(1).types()[0], equalTo("type1"));
 assertThat(request.requests().get(2).indices(), is(Strings.EMPTY_ARRAY));
-assertThat(request.requests().get(2).types().length, equalTo(0));
 assertThat(request.requests().get(3).indices(), is(Strings.EMPTY_ARRAY));
-assertThat(request.requests().get(3).types().length, equalTo(0));
 assertThat(request.requests().get(3).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH));
 assertThat(request.requests().get(4).indices(), is(Strings.EMPTY_ARRAY));
-assertThat(request.requests().get(4).types().length, equalTo(0));
 }

 public void testSimpleAdd3() throws Exception {
@@ -223,13 +211,9 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
 assertThat(request.requests().get(0).indices()[1], equalTo("test1"));
 assertThat(request.requests().get(1).indices()[0], equalTo("test2"));
 assertThat(request.requests().get(1).indices()[1], equalTo("test3"));
-assertThat(request.requests().get(1).types()[0], equalTo("type1"));
 assertThat(request.requests().get(2).indices()[0], equalTo("test4"));
 assertThat(request.requests().get(2).indices()[1], equalTo("test1"));
-assertThat(request.requests().get(2).types()[0], equalTo("type2"));
-assertThat(request.requests().get(2).types()[1], equalTo("type1"));
 assertThat(request.requests().get(3).indices(), is(Strings.EMPTY_ARRAY));
-assertThat(request.requests().get(3).types().length, equalTo(0));
 assertThat(request.requests().get(3).searchType(), equalTo(SearchType.DFS_QUERY_THEN_FETCH));
 }

@@ -242,13 +226,10 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
 assertThat(request.requests().get(0).preference(), nullValue());
 assertThat(request.requests().get(1).indices()[0], equalTo("test2"));
 assertThat(request.requests().get(1).indices()[1], equalTo("test3"));
-assertThat(request.requests().get(1).types()[0], equalTo("type1"));
 assertThat(request.requests().get(1).requestCache(), nullValue());
 assertThat(request.requests().get(1).preference(), equalTo("_local"));
 assertThat(request.requests().get(2).indices()[0], equalTo("test4"));
 assertThat(request.requests().get(2).indices()[1], equalTo("test1"));
-assertThat(request.requests().get(2).types()[0], equalTo("type2"));
-assertThat(request.requests().get(2).types()[1], equalTo("type1"));
 assertThat(request.requests().get(2).routing(), equalTo("123"));
 }

@@ -419,7 +400,6 @@ public class MultiSearchRequestTests extends OpenSearchTestCase {
 null,
 null,
 null,
-null,
 xContentRegistry(),
 true,
 deprecationLogger
@@ -127,7 +127,6 @@ public class SearchRequestTests extends AbstractSearchTestCase {
 SearchRequest searchRequest = new SearchRequest();
 assertNotNull(searchRequest.indices());
 assertNotNull(searchRequest.indicesOptions());
-assertNotNull(searchRequest.types());
 assertNotNull(searchRequest.searchType());

 NullPointerException e = expectThrows(NullPointerException.class, () -> searchRequest.indices((String[]) null));
@@ -138,11 +137,6 @@ public class SearchRequestTests extends AbstractSearchTestCase {
 e = expectThrows(NullPointerException.class, () -> searchRequest.indicesOptions(null));
 assertEquals("indicesOptions must not be null", e.getMessage());

-e = expectThrows(NullPointerException.class, () -> searchRequest.types((String[]) null));
-assertEquals("types must not be null", e.getMessage());
-e = expectThrows(NullPointerException.class, () -> searchRequest.types((String) null));
-assertEquals("type must not be null", e.getMessage());
-
 e = expectThrows(NullPointerException.class, () -> searchRequest.searchType((SearchType) null));
 assertEquals("searchType must not be null", e.getMessage());

@@ -242,7 +236,6 @@ public class SearchRequestTests extends AbstractSearchTestCase {
 )
 )
 );
-mutators.add(() -> mutation.types(ArrayUtils.concat(searchRequest.types(), new String[] { randomAlphaOfLength(10) })));
 mutators.add(() -> mutation.preference(randomValueOtherThan(searchRequest.preference(), () -> randomAlphaOfLengthBetween(3, 10))));
 mutators.add(() -> mutation.routing(randomValueOtherThan(searchRequest.routing(), () -> randomAlphaOfLengthBetween(3, 10))));
 mutators.add(() -> mutation.requestCache((randomValueOtherThan(searchRequest.requestCache(), OpenSearchTestCase::randomBoolean))));
@@ -273,13 +266,13 @@ public class SearchRequestTests extends AbstractSearchTestCase {
 }

 public void testDescriptionForDefault() {
-assertThat(toDescription(new SearchRequest()), equalTo("indices[], types[], search_type[QUERY_THEN_FETCH], source[]"));
+assertThat(toDescription(new SearchRequest()), equalTo("indices[], search_type[QUERY_THEN_FETCH], source[]"));
 }

 public void testDescriptionIncludesScroll() {
 assertThat(
 toDescription(new SearchRequest().scroll(TimeValue.timeValueMinutes(5))),
-equalTo("indices[], types[], search_type[QUERY_THEN_FETCH], scroll[5m], source[]")
+equalTo("indices[], search_type[QUERY_THEN_FETCH], scroll[5m], source[]")
 );
 }

@@ -96,7 +96,7 @@ public class SearchSlowLogTests extends OpenSearchSingleNodeTestCase {

 protected SearchContext createSearchContext(IndexService indexService, String... groupStats) {
 BigArrays bigArrays = indexService.getBigArrays();
-final ShardSearchRequest request = new ShardSearchRequest(new ShardId(indexService.index(), 0), new String[0], 0L, null);
+final ShardSearchRequest request = new ShardSearchRequest(new ShardId(indexService.index(), 0), 0L, null);
 return new TestSearchContext(bigArrays, indexService) {
 @Override
 public List<String> groupStats() {
@@ -258,30 +258,12 @@ public class SearchSlowLogTests extends OpenSearchSingleNodeTestCase {
 assertThat(p.getValueFor("took"), equalTo("10nanos"));
 assertThat(p.getValueFor("took_millis"), equalTo("0"));
 assertThat(p.getValueFor("total_hits"), equalTo("-1"));
-assertThat(p.getValueFor("types"), equalTo("[]"));
 assertThat(p.getValueFor("stats"), equalTo("[]"));
 assertThat(p.getValueFor("search_type"), Matchers.nullValue());
 assertThat(p.getValueFor("total_shards"), equalTo("1"));
 assertThat(p.getValueFor("source"), equalTo("{\\\"query\\\":{\\\"match_all\\\":{\\\"boost\\\":1.0}}}"));
 }

-public void testSlowLogWithTypes() throws IOException {
-IndexService index = createIndex("foo");
-SearchContext searchContext = searchContextWithSourceAndTask(index);
-searchContext.getQueryShardContext().setTypes("type1", "type2");
-SearchSlowLog.SearchSlowLogMessage p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
-
-assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\", \\\"type2\\\"]"));
-
-searchContext.getQueryShardContext().setTypes("type1");
-p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
-assertThat(p.getValueFor("types"), equalTo("[\\\"type1\\\"]"));
-
-searchContext.getQueryShardContext().setTypes();
-p = new SearchSlowLog.SearchSlowLogMessage(searchContext, 10);
-assertThat(p.getValueFor("types"), equalTo("[]"));
-}
-
 public void testSlowLogsWithStats() throws IOException {
 IndexService index = createIndex("foo");
 SearchContext searchContext = createSearchContext(index, "group1");
@@ -143,7 +143,7 @@ public class IndexFieldDataServiceTests extends OpenSearchSingleNodeTestCase {
 searchLookupSetOnce.set(searchLookup);
 return (IndexFieldData.Builder) (cache, breakerService) -> null;
 });
-SearchLookup searchLookup = new SearchLookup(null, null, null);
+SearchLookup searchLookup = new SearchLookup(null, null);
 ifdService.getForField(ft, "qualified", () -> searchLookup);
 assertSame(searchLookup, searchLookupSetOnce.get().get());
 }
@@ -42,9 +42,6 @@ import org.opensearch.index.query.QueryShardContext;
 import org.opensearch.test.OpenSearchTestCase;
 import org.mockito.Mockito;

-import java.util.Collection;
-import java.util.Collections;
-
 public class IdFieldTypeTests extends OpenSearchTestCase {

 public void testRangeQuery() {
@@ -70,16 +67,12 @@ public class IdFieldTypeTests extends OpenSearchTestCase {
 Mockito.when(context.indexVersionCreated()).thenReturn(indexSettings.getAsVersion(IndexMetadata.SETTING_VERSION_CREATED, null));

 MapperService mapperService = Mockito.mock(MapperService.class);
-Collection<String> types = Collections.emptySet();
-Mockito.when(context.queryTypes()).thenReturn(types);
 Mockito.when(context.getMapperService()).thenReturn(mapperService);

 MappedFieldType ft = new IdFieldMapper.IdFieldType(() -> false);
 Query query = ft.termQuery("id", context);
 assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query);

-types = Collections.singleton("type");
-Mockito.when(context.queryTypes()).thenReturn(types);
 query = ft.termQuery("id", context);
 assertEquals(new TermInSetQuery("_id", Uid.encodeId("id")), query);
 }
|
@@ -45,7 +45,7 @@ public class CommonTermsQueryParserTests extends OpenSearchSingleNodeTestCase {
         CommonTermsQueryBuilder commonTermsQueryBuilder = new CommonTermsQueryBuilder("name", "the").queryName("query-name");

         // the named query parses to null; we are testing this does not cause a NullPointerException
-        SearchResponse response = client().prepareSearch(index).setTypes(type).setQuery(commonTermsQueryBuilder).execute().actionGet();
+        SearchResponse response = client().prepareSearch(index).setQuery(commonTermsQueryBuilder).execute().actionGet();

         assertNotNull(response);
         assertEquals(response.getHits().getHits().length, 0);
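For orientation, a minimal sketch of a typeless search as the test above now issues it: the request is scoped by index only and the former setTypes(...) call is simply dropped. The Client instance, index name, and query below are placeholders and are not part of this diff.

import org.opensearch.action.search.SearchResponse;
import org.opensearch.client.Client;
import org.opensearch.index.query.QueryBuilders;

class TypelessSearchSketch {
    // Issues a match_all search scoped by index only; no type is attached to the request.
    static SearchResponse search(Client client) {
        return client.prepareSearch("my-index")          // placeholder index name
            .setQuery(QueryBuilders.matchAllQuery())     // any query builder works here
            .get();
    }
}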
@@ -35,14 +35,11 @@ package org.opensearch.index.query;
 import org.apache.lucene.search.MatchNoDocsQuery;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.TermInSetQuery;
-import org.opensearch.cluster.metadata.Metadata;
 import org.opensearch.common.ParsingException;
 import org.opensearch.common.xcontent.XContentParser;
-import org.opensearch.index.mapper.IdFieldMapper;
 import org.opensearch.test.AbstractQueryTestCase;

 import java.io.IOException;
-import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;

@@ -56,43 +53,17 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>

     @Override
     protected IdsQueryBuilder doCreateTestQueryBuilder() {
-        final String type;
-        if (randomBoolean()) {
-            if (frequently()) {
-                type = "_doc";
-            } else {
-                type = randomAlphaOfLengthBetween(1, 10);
-            }
-        } else if (randomBoolean()) {
-            type = Metadata.ALL;
-        } else {
-            type = null;
-        }
         int numberOfIds = randomIntBetween(0, 10);
         String[] ids = new String[numberOfIds];
         for (int i = 0; i < numberOfIds; i++) {
             ids[i] = randomAlphaOfLengthBetween(1, 10);
         }
-        IdsQueryBuilder query;
-        if (type != null && randomBoolean()) {
-            query = new IdsQueryBuilder().types(type);
-            query.addIds(ids);
-        } else {
-            query = new IdsQueryBuilder();
-            query.addIds(ids);
-        }
-        return query;
+        return new IdsQueryBuilder().addIds(ids);
     }

     @Override
     protected void doAssertLuceneQuery(IdsQueryBuilder queryBuilder, Query query, QueryShardContext context) throws IOException {
-        boolean allTypes = queryBuilder.types().length == 0 || queryBuilder.types().length == 1 && "_all".equals(queryBuilder.types()[0]);
-        if (queryBuilder.ids().size() == 0
-            // no types
-            || context.fieldMapper(IdFieldMapper.NAME) == null
-            // there are types, but disjoint from the query
-            || (allTypes == false
-                && Arrays.asList(queryBuilder.types()).indexOf(context.getMapperService().documentMapper().type()) == -1)) {
+        if (queryBuilder.ids().size() == 0) {
             assertThat(query, instanceOf(MatchNoDocsQuery.class));
         } else {
             assertThat(query, instanceOf(TermInSetQuery.class));
@@ -100,11 +71,8 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
     }

     public void testIllegalArguments() {
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new IdsQueryBuilder().types((String[]) null));
-        assertEquals("[ids] types cannot be null", e.getMessage());
-
         IdsQueryBuilder idsQueryBuilder = new IdsQueryBuilder();
-        e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[]) null));
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> idsQueryBuilder.addIds((String[]) null));
         assertEquals("[ids] ids cannot be null", e.getMessage());
     }

@@ -116,64 +84,27 @@ public class IdsQueryBuilderTests extends AbstractQueryTestCase<IdsQueryBuilder>
     }

     public void testFromJson() throws IOException {
-        String json = "{\n"
-            + " \"ids\" : {\n"
-            + " \"type\" : [ \"my_type\" ],\n"
-            + " \"values\" : [ \"1\", \"100\", \"4\" ],\n"
-            + " \"boost\" : 1.0\n"
-            + " }\n"
-            + "}";
+        String json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}";
         IdsQueryBuilder parsed = (IdsQueryBuilder) parseQuery(json);
         checkGeneratedJson(json, parsed);
         assertThat(parsed.ids(), contains("1", "100", "4"));
-        assertEquals(json, "my_type", parsed.types()[0]);

         // check that type that is not an array and also ids that are numbers are parsed
-        json = "{\n"
-            + " \"ids\" : {\n"
-            + " \"type\" : \"my_type\",\n"
-            + " \"values\" : [ 1, 100, 4 ],\n"
-            + " \"boost\" : 1.0\n"
-            + " }\n"
-            + "}";
+        json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ 1, 100, 4 ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}";
         parsed = (IdsQueryBuilder) parseQuery(json);
         assertThat(parsed.ids(), contains("1", "100", "4"));
-        assertEquals(json, "my_type", parsed.types()[0]);
-
-        // check with empty type array
-        json = "{\n"
-            + " \"ids\" : {\n"
-            + " \"type\" : [ ],\n"
-            + " \"values\" : [ \"1\", \"100\", \"4\" ],\n"
-            + " \"boost\" : 1.0\n"
-            + " }\n"
-            + "}";
-        parsed = (IdsQueryBuilder) parseQuery(json);
-        assertThat(parsed.ids(), contains("1", "100", "4"));
-        assertEquals(json, 0, parsed.types().length);
-
-        // check without type
-        json = "{\n" + " \"ids\" : {\n" + " \"values\" : [ \"1\", \"100\", \"4\" ],\n" + " \"boost\" : 1.0\n" + " }\n" + "}";
-        parsed = (IdsQueryBuilder) parseQuery(json);
-        assertThat(parsed.ids(), contains("1", "100", "4"));
-        assertEquals(json, 0, parsed.types().length);
     }

     @Override
     protected QueryBuilder parseQuery(XContentParser parser) throws IOException {
         QueryBuilder query = super.parseQuery(parser);
         assertThat(query, instanceOf(IdsQueryBuilder.class));
-
-        IdsQueryBuilder idsQuery = (IdsQueryBuilder) query;
-        if (idsQuery.types().length > 0 && !assertedWarnings.contains(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE)) {
-            assertWarnings(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
-            assertedWarnings.add(IdsQueryBuilder.TYPES_DEPRECATION_MESSAGE);
-        }
         return query;
     }

     @Override
     public void testMustRewrite() throws IOException {
+        super.testMustRewrite();
         QueryShardContext context = createShardContextWithNoType();
         context.setAllowUnmappedFields(true);
         IdsQueryBuilder queryBuilder = createTestQueryBuilder();
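A minimal sketch of the simplified ids query construction exercised above; the id values are placeholders, and the commented JSON mirrors the shape asserted in testFromJson.

import org.opensearch.index.query.IdsQueryBuilder;

class IdsQuerySketch {
    // Builds an ids query from document ids alone; there is no types(...) step anymore.
    static IdsQueryBuilder byIds() {
        // Serializes to: { "ids" : { "values" : [ "1", "100", "4" ], "boost" : 1.0 } }
        return new IdsQueryBuilder().addIds("1", "100", "4");
    }
}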
@@ -34,7 +34,6 @@ package org.opensearch.index.query;

 import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-import org.opensearch.common.Strings;

 import java.util.Random;

@@ -63,7 +62,7 @@ public class RandomQueryBuilder {
             case 2:
                 // We make sure this query has no types to avoid deprecation warnings in the
                 // tests that use this method.
-                return new IdsQueryBuilderTests().createTestQueryBuilder().types(Strings.EMPTY_ARRAY);
+                return new IdsQueryBuilderTests().createTestQueryBuilder();
             case 3:
                 return createMultiTermQuery(r);
             default:
@@ -95,30 +95,6 @@ public class DeleteByQueryRequestTests extends AbstractBulkByScrollRequestTestCa
         // No extra assertions needed
     }

-    public void testTypesGetter() {
-        int numTypes = between(1, 50);
-        String[] types = new String[numTypes];
-        for (int i = 0; i < numTypes; i++) {
-            types[i] = randomSimpleString(random(), 1, 30);
-        }
-        SearchRequest searchRequest = new SearchRequest();
-        searchRequest.types(types);
-        DeleteByQueryRequest request = new DeleteByQueryRequest(searchRequest);
-        assertArrayEquals(request.types(), types);
-    }
-
-    public void testTypesSetter() {
-        int numTypes = between(1, 50);
-        String[] types = new String[numTypes];
-        for (int i = 0; i < numTypes; i++) {
-            types[i] = randomSimpleString(random(), 1, 30);
-        }
-        SearchRequest searchRequest = new SearchRequest();
-        DeleteByQueryRequest request = new DeleteByQueryRequest(searchRequest);
-        request.types(types);
-        assertArrayEquals(request.types(), types);
-    }
-
     public void testValidateGivenNoQuery() {
         SearchRequest searchRequest = new SearchRequest();
         DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(searchRequest);
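A minimal sketch of a delete-by-query request after the types getter and setter are removed; it uses the public index-based constructor rather than the SearchRequest-based constructor seen in the test, and the index name and query are placeholders.

import org.opensearch.index.query.QueryBuilders;
import org.opensearch.index.reindex.DeleteByQueryRequest;

class DeleteByQuerySketch {
    // The request is scoped by index and query only; the removed types() accessors have no replacement.
    static DeleteByQueryRequest build() {
        DeleteByQueryRequest request = new DeleteByQueryRequest("my-index"); // placeholder index name
        request.setQuery(QueryBuilders.matchAllQuery());                     // any query builder
        return request;
    }
}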
@@ -1,103 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.rest.action.admin.indices;
-
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.concurrent.ThreadContext;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.RestStatus;
-import org.opensearch.test.rest.FakeRestChannel;
-import org.opensearch.test.rest.FakeRestRequest;
-import org.opensearch.test.rest.RestActionTestCase;
-import org.junit.Before;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
-
-public class RestGetFieldMappingActionTests extends RestActionTestCase {
-
-    @Before
-    public void setUpAction() {
-        controller().registerHandler(new RestGetFieldMappingAction());
-    }
-
-    public void testIncludeTypeName() {
-        Map<String, String> params = new HashMap<>();
-        String path;
-        if (randomBoolean()) {
-            params.put(INCLUDE_TYPE_NAME_PARAMETER, "true");
-            path = "some_index/some_type/_mapping/field/some_field";
-        } else {
-            params.put(INCLUDE_TYPE_NAME_PARAMETER, "false");
-            path = "some_index/_mapping/field/some_field";
-        }
-
-        // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-        verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
-        RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-            .withPath(path)
-            .withParams(params)
-            .build();
-        dispatchRequest(deprecatedRequest);
-        assertWarnings(RestGetFieldMappingAction.TYPES_DEPRECATION_MESSAGE);
-
-        RestRequest validRequest = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-            .withPath("some_index/_mapping/field/some_field")
-            .build();
-        dispatchRequest(validRequest);
-    }
-
-    public void testTypeInPath() {
-        // Test that specifying a type while setting include_type_name to false
-        // results in an illegal argument exception.
-        Map<String, String> params = new HashMap<>();
-        params.put(INCLUDE_TYPE_NAME_PARAMETER, "false");
-        RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-            .withPath("some_index/some_type/_mapping/field/some_field")
-            .withParams(params)
-            .build();
-
-        // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-        verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
-        FakeRestChannel channel = new FakeRestChannel(request, false, 1);
-        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
-        controller().dispatchRequest(request, channel, threadContext);
-
-        assertEquals(1, channel.errors().get());
-        assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status());
-    }
-}
@@ -1,124 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.rest.action.admin.indices;
-
-import org.opensearch.client.node.NodeClient;
-import org.opensearch.common.settings.Settings;
-import org.opensearch.common.util.concurrent.ThreadContext;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.rest.RestStatus;
-import org.opensearch.test.rest.FakeRestChannel;
-import org.opensearch.test.rest.FakeRestRequest;
-import org.opensearch.test.rest.RestActionTestCase;
-import org.opensearch.threadpool.TestThreadPool;
-import org.opensearch.threadpool.ThreadPool;
-import org.junit.After;
-import org.junit.Before;
-
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
-import static org.mockito.Mockito.mock;
-
-public class RestGetMappingActionTests extends RestActionTestCase {
-
-    private ThreadPool threadPool;
-
-    @Before
-    public void setUpAction() {
-        threadPool = new TestThreadPool(RestValidateQueryActionTests.class.getName());
-        controller().registerHandler(new RestGetMappingAction(threadPool));
-    }
-
-    @After
-    public void tearDownAction() {
-        assertTrue(terminate(threadPool));
-    }
-
-    public void testTypeExistsDeprecation() throws Exception {
-        Map<String, String> params = new HashMap<>();
-        params.put("type", "_doc");
-        RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.HEAD)
-            .withParams(params)
-            .build();
-
-        RestGetMappingAction handler = new RestGetMappingAction(threadPool);
-        handler.prepareRequest(request, mock(NodeClient.class));
-
-        assertWarnings("Type exists requests are deprecated, as types have been deprecated.");
-    }
-
-    public void testTypeInPath() {
-        // Test that specifying a type while setting include_type_name to false
-        // results in an illegal argument exception.
-        Map<String, String> params = new HashMap<>();
-        params.put(INCLUDE_TYPE_NAME_PARAMETER, "false");
-        RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-            .withPath("some_index/some_type/_mapping/some_field")
-            .withParams(params)
-            .build();
-
-        // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-        verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
-        FakeRestChannel channel = new FakeRestChannel(request, false, 1);
-        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
-        controller().dispatchRequest(request, channel, threadContext);
-
-        assertEquals(1, channel.errors().get());
-        assertEquals(RestStatus.BAD_REQUEST, channel.capturedResponse().status());
-    }
-
-    /**
-     * Setting "include_type_name" to true or false should cause a deprecation warning starting in 7.0
-     */
-    public void testTypeUrlParameterDeprecation() throws Exception {
-        Map<String, String> params = new HashMap<>();
-        params.put(INCLUDE_TYPE_NAME_PARAMETER, Boolean.toString(randomBoolean()));
-        RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.GET)
-            .withParams(params)
-            .withPath("/some_index/_mappings")
-            .build();
-
-        // We're not actually testing anything to do with the client, but need to set this so it doesn't fail the test for being unset.
-        verifyingClient.setExecuteVerifier((arg1, arg2) -> null);
-
-        FakeRestChannel channel = new FakeRestChannel(request, false, 1);
-        ThreadContext threadContext = new ThreadContext(Settings.EMPTY);
-        controller().dispatchRequest(request, channel, threadContext);
-
-        assertWarnings(RestGetMappingAction.TYPES_DEPRECATION_MESSAGE);
-    }
-
-}
@@ -1,92 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- */
-
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-/*
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package org.opensearch.rest.action.admin.indices;
-
-import org.opensearch.client.node.NodeClient;
-import org.opensearch.common.bytes.BytesReference;
-import org.opensearch.common.xcontent.XContentBuilder;
-import org.opensearch.common.xcontent.XContentFactory;
-import org.opensearch.common.xcontent.XContentType;
-import org.opensearch.rest.RestRequest;
-import org.opensearch.test.rest.FakeRestRequest;
-import org.opensearch.test.rest.RestActionTestCase;
-import org.junit.Before;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.opensearch.rest.BaseRestHandler.INCLUDE_TYPE_NAME_PARAMETER;
-import static org.mockito.Mockito.mock;
-
-public class RestPutIndexTemplateActionTests extends RestActionTestCase {
-    private RestPutIndexTemplateAction action;
-
-    @Before
-    public void setUpAction() {
-        action = new RestPutIndexTemplateAction();
-        controller().registerHandler(action);
-    }
-
-    public void testIncludeTypeName() throws IOException {
-        XContentBuilder typedContent = XContentFactory.jsonBuilder()
-            .startObject()
-            .startObject("mappings")
-            .startObject("my_doc")
-            .startObject("properties")
-            .startObject("field1")
-            .field("type", "keyword")
-            .endObject()
-            .startObject("field2")
-            .field("type", "text")
-            .endObject()
-            .endObject()
-            .endObject()
-            .endObject()
-            .startObject("aliases")
-            .startObject("read_alias")
-            .endObject()
-            .endObject()
-            .endObject();
-
-        Map<String, String> params = new HashMap<>();
-        params.put(INCLUDE_TYPE_NAME_PARAMETER, "true");
-        RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withMethod(RestRequest.Method.PUT)
-            .withParams(params)
-            .withPath("/_template/_some_template")
-            .withContent(BytesReference.bytes(typedContent), XContentType.JSON)
-            .build();
-        action.prepareRequest(request, mock(NodeClient.class));
-        assertWarnings(RestPutIndexTemplateAction.TYPES_DEPRECATION_MESSAGE);
-    }
-}
Some files were not shown because too many files have changed in this diff.