Remove legacy mapping code. (#29224)

Some features, such as the `_parent` field and the ability to have multiple
types per index, have been deprecated since `6.0`. This allows us to remove
quite a lot of code, which in turn will hopefully make it easier to proceed
with the removal of types.
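For context, since `6.0` parent/child relations are expressed with the `join`
field type rather than the removed `_parent` meta field. A minimal mapping
sketch (field and relation names are made up, not taken from this commit):

// Sketch of the join-field mapping that replaces _parent; built with the
// XContentBuilder helpers used elsewhere in this commit.
XContentBuilder mapping = XContentFactory.jsonBuilder()
    .startObject()
        .startObject("properties")
            .startObject("join_field")              // stands in for the removed _parent meta field
                .field("type", "join")
                .startObject("relations")
                    .field("question", "answer")    // one parent relation pointing at one child relation
                .endObject()
            .endObject()
        .endObject()
    .endObject();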
Adrien Grand 2018-04-11 09:41:37 +02:00 committed by GitHub
parent 6949c888bf
commit 4918924fae
226 changed files with 843 additions and 7494 deletions

View File

@ -136,7 +136,6 @@ public final class Request {
Params parameters = Params.builder();
parameters.withRouting(deleteRequest.routing());
parameters.withParent(deleteRequest.parent());
parameters.withTimeout(deleteRequest.timeout());
parameters.withVersion(deleteRequest.version());
parameters.withVersionType(deleteRequest.versionType());
@ -315,9 +314,6 @@ public final class Request {
if (Strings.hasLength(request.routing())) {
metadata.field("routing", request.routing());
}
if (Strings.hasLength(request.parent())) {
metadata.field("parent", request.parent());
}
if (request.version() != Versions.MATCH_ANY) {
metadata.field("version", request.version());
}
@ -394,7 +390,6 @@ public final class Request {
Params parameters = Params.builder();
parameters.withPreference(getRequest.preference());
parameters.withRouting(getRequest.routing());
parameters.withParent(getRequest.parent());
parameters.withRefresh(getRequest.refresh());
parameters.withRealtime(getRequest.realtime());
parameters.withStoredFields(getRequest.storedFields());
@ -422,7 +417,6 @@ public final class Request {
Params parameters = Params.builder();
parameters.withRouting(indexRequest.routing());
parameters.withParent(indexRequest.parent());
parameters.withTimeout(indexRequest.timeout());
parameters.withVersion(indexRequest.version());
parameters.withVersionType(indexRequest.versionType());
@ -446,7 +440,6 @@ public final class Request {
Params parameters = Params.builder();
parameters.withRouting(updateRequest.routing());
parameters.withParent(updateRequest.parent());
parameters.withTimeout(updateRequest.timeout());
parameters.withRefreshPolicy(updateRequest.getRefreshPolicy());
parameters.withWaitForActiveShards(updateRequest.waitForActiveShards());
@ -711,10 +704,6 @@ public final class Request {
return putParam("master_timeout", masterTimeout);
}
Params withParent(String parent) {
return putParam("parent", parent);
}
Params withPipeline(String pipeline) {
return putParam("pipeline", pipeline);
}

View File

@ -362,19 +362,6 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
"version conflict, current version [2] is different than the one provided [5]]", exception.getMessage());
assertEquals("index", exception.getMetadata("es.index").get(0));
}
{
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
IndexRequest indexRequest = new IndexRequest("index", "type", "missing_parent");
indexRequest.source(XContentBuilder.builder(xContentType.xContent()).startObject().field("field", "test").endObject());
indexRequest.parent("missing");
execute(indexRequest, highLevelClient()::index, highLevelClient()::indexAsync);
});
assertEquals(RestStatus.BAD_REQUEST, exception.status());
assertEquals("Elasticsearch exception [type=illegal_argument_exception, " +
"reason=can't specify parent if no parent field has been configured]", exception.getMessage());
}
{
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
IndexRequest indexRequest = new IndexRequest("index", "type", "missing_pipeline");
@ -456,22 +443,6 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
assertEquals("Elasticsearch exception [type=version_conflict_engine_exception, reason=[type][id]: version conflict, " +
"current version [2] is different than the one provided [1]]", exception.getMessage());
}
{
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> {
UpdateRequest updateRequest = new UpdateRequest("index", "type", "id");
updateRequest.doc(singletonMap("field", "updated"), randomFrom(XContentType.values()));
if (randomBoolean()) {
updateRequest.parent("missing");
} else {
updateRequest.routing("missing");
}
execute(updateRequest, highLevelClient()::update, highLevelClient()::updateAsync);
});
assertEquals(RestStatus.NOT_FOUND, exception.status());
assertEquals("Elasticsearch exception [type=document_missing_exception, reason=[type][id]: document missing]",
exception.getMessage());
}
{
IndexRequest indexRequest = new IndexRequest("index", "type", "with_script");
indexRequest.source(singletonMap("counter", 12));

View File

@ -209,9 +209,6 @@ public class RequestTests extends ESTestCase {
if (randomBoolean()) {
item.routing(randomAlphaOfLength(4));
}
if (randomBoolean()) {
item.parent(randomAlphaOfLength(4));
}
if (randomBoolean()) {
item.storedFields(generateRandomStringArray(16, 8, false));
}
@ -253,11 +250,6 @@ public class RequestTests extends ESTestCase {
deleteRequest.routing(routing);
expectedParams.put("routing", routing);
}
if (randomBoolean()) {
String parent = randomAlphaOfLengthBetween(3, 10);
deleteRequest.parent(parent);
expectedParams.put("parent", parent);
}
}
Request request = Request.delete(deleteRequest);
@ -525,11 +517,6 @@ public class RequestTests extends ESTestCase {
indexRequest.routing(routing);
expectedParams.put("routing", routing);
}
if (randomBoolean()) {
String parent = randomAlphaOfLengthBetween(3, 10);
indexRequest.parent(parent);
expectedParams.put("parent", parent);
}
if (randomBoolean()) {
String pipeline = randomAlphaOfLengthBetween(3, 10);
indexRequest.setPipeline(pipeline);
@ -732,11 +719,6 @@ public class RequestTests extends ESTestCase {
updateRequest.routing(routing);
expectedParams.put("routing", routing);
}
if (randomBoolean()) {
String parent = randomAlphaOfLengthBetween(3, 10);
updateRequest.parent(parent);
expectedParams.put("parent", parent);
}
if (randomBoolean()) {
String timeout = randomTimeValue();
updateRequest.timeout(timeout);
@ -840,15 +822,9 @@ public class RequestTests extends ESTestCase {
if (randomBoolean()) {
indexRequest.setPipeline(randomAlphaOfLength(5));
}
if (randomBoolean()) {
indexRequest.parent(randomAlphaOfLength(5));
}
} else if (opType == DocWriteRequest.OpType.CREATE) {
IndexRequest createRequest = new IndexRequest(index, type, id).source(source, xContentType).create(true);
docWriteRequest = createRequest;
if (randomBoolean()) {
createRequest.parent(randomAlphaOfLength(5));
}
} else if (opType == DocWriteRequest.OpType.UPDATE) {
final UpdateRequest updateRequest = new UpdateRequest(index, type, id).doc(new IndexRequest().source(source, xContentType));
docWriteRequest = updateRequest;
@ -858,9 +834,6 @@ public class RequestTests extends ESTestCase {
if (randomBoolean()) {
randomizeFetchSourceContextParams(updateRequest::fetchSource, new HashMap<>());
}
if (randomBoolean()) {
updateRequest.parent(randomAlphaOfLength(5));
}
} else if (opType == DocWriteRequest.OpType.DELETE) {
docWriteRequest = new DeleteRequest(index, type, id);
} else {
@ -902,7 +875,6 @@ public class RequestTests extends ESTestCase {
assertEquals(originalRequest.type(), parsedRequest.type());
assertEquals(originalRequest.id(), parsedRequest.id());
assertEquals(originalRequest.routing(), parsedRequest.routing());
assertEquals(originalRequest.parent(), parsedRequest.parent());
assertEquals(originalRequest.version(), parsedRequest.version());
assertEquals(originalRequest.versionType(), parsedRequest.versionType());

View File

@ -186,9 +186,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::index-request-routing
request.routing("routing"); // <1>
// end::index-request-routing
// tag::index-request-parent
request.parent("parent"); // <1>
// end::index-request-parent
// tag::index-request-timeout
request.timeout(TimeValue.timeValueSeconds(1)); // <1>
request.timeout("1s"); // <2>
@ -475,9 +472,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::update-request-routing
request.routing("routing"); // <1>
// end::update-request-routing
// tag::update-request-parent
request.parent("parent"); // <1>
// end::update-request-parent
// tag::update-request-timeout
request.timeout(TimeValue.timeValueSeconds(1)); // <1>
request.timeout("1s"); // <2>
@ -583,9 +577,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::delete-request-routing
request.routing("routing"); // <1>
// end::delete-request-routing
// tag::delete-request-parent
request.parent("parent"); // <1>
// end::delete-request-parent
// tag::delete-request-timeout
request.timeout(TimeValue.timeValueMinutes(2)); // <1>
request.timeout("2m"); // <2>
@ -869,9 +860,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
//tag::get-request-routing
request.routing("routing"); // <1>
//end::get-request-routing
//tag::get-request-parent
request.parent("parent"); // <1>
//end::get-request-parent
//tag::get-request-preference
request.preference("preference"); // <1>
//end::get-request-preference
@ -1122,8 +1110,6 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::multi-get-request-item-extras
request.add(new MultiGetRequest.Item("index", "type", "with_routing")
.routing("some_routing")); // <1>
request.add(new MultiGetRequest.Item("index", "type", "with_parent")
.parent("some_parent")); // <2>
request.add(new MultiGetRequest.Item("index", "type", "with_version")
.versionType(VersionType.EXTERNAL) // <3>
.version(10123L)); // <4>

View File

@ -131,7 +131,7 @@ public class SearchDocumentationIT extends ESRestHighLevelClientTestCase {
// tag::search-source-sorting
sourceBuilder.sort(new ScoreSortBuilder().order(SortOrder.DESC)); // <1>
sourceBuilder.sort(new FieldSortBuilder("_uid").order(SortOrder.ASC)); // <2>
sourceBuilder.sort(new FieldSortBuilder("_id").order(SortOrder.ASC)); // <2>
// end::search-source-sorting
// tag::search-source-filtering-off

View File

@ -23,12 +23,6 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-routing]
--------------------------------------------------
<1> Routing value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-parent]
--------------------------------------------------
<1> Parent value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[delete-request-timeout]

View File

@ -51,12 +51,6 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-routing]
--------------------------------------------------
<1> Routing value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-parent]
--------------------------------------------------
<1> Parent value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[get-request-preference]

View File

@ -49,12 +49,6 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-routing]
--------------------------------------------------
<1> Routing value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-parent]
--------------------------------------------------
<1> Parent value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[index-request-timeout]

View File

@ -93,12 +93,6 @@ include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-routing]
--------------------------------------------------
<1> Routing value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-parent]
--------------------------------------------------
<1> Parent value
["source","java",subs="attributes,callouts,macros"]
--------------------------------------------------
include-tagged::{doc-tests}/CRUDDocumentationIT.java[update-request-timeout]

View File

@ -492,7 +492,7 @@ Which results in a sensible `total` like this one:
==== Automatic slicing
You can also let delete-by-query automatically parallelize using
<<sliced-scroll>> to slice on `_uid`. Use `slices` to specify the number of
<<sliced-scroll>> to slice on `_id`. Use `slices` to specify the number of
slices to use:
[source,js]

View File

@ -531,7 +531,7 @@ Which results in a sensible `total` like this one:
==== Automatic slicing
You can also let update-by-query automatically parallelize using
<<sliced-scroll>> to slice on `_uid`. Use `slices` to specify the number of
<<sliced-scroll>> to slice on `_id`. Use `slices` to specify the number of
slices to use:
[source,js]

View File

@ -35,9 +35,9 @@ consistent across queries.
This work around has another benefit: when two documents have the same score,
they will be sorted by their internal Lucene doc id (which is unrelated to the
`_id` or `_uid`) by default. However these doc ids could be different across
copies of the same shard. So by always hitting the same shard, we would get
more consistent ordering of documents that have the same scores.
`_id`) by default. However these doc ids could be different across copies of
the same shard. So by always hitting the same shard, we would get more
consistent ordering of documents that have the same scores.
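A rough sketch of that workaround, not part of this change, using the Java
client classes that appear elsewhere in this commit (the index name and
preference value are made up):

// Pin one user's searches to the same shard copies so that scoring and the
// tie-breaking on internal Lucene doc ids stay consistent across requests.
// "user-123" is illustrative; any stable per-user string works.
SearchRequest searchRequest = new SearchRequest("my_index");
searchRequest.preference("user-123");
searchRequest.source(new SearchSourceBuilder()
        .query(QueryBuilders.matchQuery("title", "elasticsearch")));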
[float]
==== Relevancy looks wrong

View File

@ -13,10 +13,6 @@ can be customised when a mapping type is created.
The index to which the document belongs.
<<mapping-uid-field,`_uid`>>::
A composite field consisting of the `_type` and the `_id`.
<<mapping-type-field,`_type`>>::
The document's <<mapping-type,mapping type>>.

View File

@ -5,10 +5,6 @@ Each document has an `_id` that uniquely identifies it, which is indexed
so that documents can be looked up either with the <<docs-get,GET API>> or the
<<query-dsl-ids-query,`ids` query>>.
NOTE: This was not the case with pre-6.0 indices due to the fact that they
supported multiple types, so the `_type` and `_id` were merged into a composite
primary key called `_uid`.
The value of the `_id` field is accessible in certain queries (`term`,
`terms`, `match`, `query_string`, `simple_query_string`).
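As an illustration of the above (not taken from this commit), a lookup by
`_id` from the Java client could look like the following sketch; the index
name and ids are made up:

// Fetch documents by _id, which after the removal of _uid is the only
// identifier needed; the ids query is the documented way to do it.
SearchRequest request = new SearchRequest("my_index");
request.source(new SearchSourceBuilder()
        .query(QueryBuilders.idsQuery().addIds("1", "2")));
// a terms query on the _id field itself is equivalent:
// QueryBuilders.termsQuery("_id", "1", "2")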

View File

@ -1,69 +0,0 @@
[[mapping-uid-field]]
=== `_uid` field
deprecated[6.0.0, Now that types have been removed, documents are uniquely identified by their `_id` and the `_uid` field has only been kept as a view over the `_id` field for backward compatibility.]
Each document indexed is associated with a <<mapping-type-field,`_type`>> (see
<<mapping-type>>) and an <<mapping-id-field,`_id`>>. These values are
combined as `{type}#{id}` and indexed as the `_uid` field.
The value of the `_uid` field is accessible in queries, aggregations, scripts,
and when sorting:
[source,js]
--------------------------
# Example documents
PUT my_index/_doc/1
{
"text": "Document with ID 1"
}
PUT my_index/_doc/2?refresh=true
{
"text": "Document with ID 2"
}
--------------------------
// CONSOLE
[source,js]
--------------------------
GET my_index/_search
{
"query": {
"terms": {
"_uid": [ "_doc#1", "_doc#2" ] <1>
}
},
"aggs": {
"UIDs": {
"terms": {
"field": "_uid", <2>
"size": 10
}
}
},
"sort": [
{
"_uid": { <3>
"order": "desc"
}
}
],
"script_fields": {
"UID": {
"script": {
"lang": "painless",
"source": "doc['_uid']" <4>
}
}
}
}
--------------------------
// CONSOLE
// TEST[continued]
// TEST[warning:Fielddata access on the _uid field is deprecated, use _id instead]
<1> Querying on the `_uid` field (also see the <<query-dsl-ids-query,`ids` query>>)
<2> Aggregating on the `_uid` field
<3> Sorting on the `_uid` field
<4> Accessing the `_uid` field in scripts

View File

@ -5,6 +5,12 @@
The `_all` field, deprecated in 6.x, has now been removed.
==== The `_uid` meta field is removed
This field used to index a composite key formed of the `_type` and the `_id`.
Now that indices cannot have multiple types, this has been removed in favour
of `_id`.
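The corresponding client-side change is mechanical, as the snippets updated in
this commit show; a sketch, assuming `sourceBuilder` is a `SearchSourceBuilder`:

// Before, on pre-6.0 multi-type indices, sorting used the composite _uid field:
// sourceBuilder.sort(new FieldSortBuilder("_uid").order(SortOrder.ASC));
// Now documents are identified by _id alone, so sort on _id instead:
sourceBuilder.sort(new FieldSortBuilder("_id").order(SortOrder.ASC));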
==== The `_default_` mapping is no longer allowed
The `_default_` mapping has been deprecated in 6.0 and is now no longer allowed

View File

@ -2,7 +2,7 @@
=== Ids Query
Filters documents that only have the provided ids. Note, this query
uses the <<mapping-uid-field,_uid>> field.
uses the <<mapping-id-field,_id>> field.
[source,js]
--------------------------------------------------

View File

@ -225,9 +225,9 @@ GET /twitter/_search?scroll=1m
The result from the first request returned documents that belong to the first slice (id: 0) and the result from the
second request returned documents that belong to the second slice. Since the maximum number of slices is set to 2
the union of the results of the two requests is equivalent to the results of a scroll query without slicing.
By default the splitting is done on the shards first and then locally on each shard using the _uid field
By default the splitting is done on the shards first and then locally on each shard using the _id field
with the following formula:
`slice(doc) = floorMod(hashCode(doc._uid), max)`
`slice(doc) = floorMod(hashCode(doc._id), max)`
For instance if the number of shards is equal to 2 and the user requested 4 slices then the slices 0 and 2 are assigned
to the first shard and the slices 1 and 3 are assigned to the second shard.
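A toy Java sketch of that formula; note that String.hashCode() stands in for
the hash Elasticsearch actually computes on `_id`, so the concrete slice
numbers below are illustrative only:

// Toy illustration of slice(doc) = floorMod(hashCode(doc._id), max); only the
// idea of spreading ids evenly across slices carries over, not the exact values.
int maxSlices = 4;
for (String id : new String[] {"doc-1", "doc-2", "doc-3", "doc-4"}) {
    int slice = Math.floorMod(id.hashCode(), maxSlices);
    System.out.println(id + " -> slice " + slice);
}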

View File

@ -180,7 +180,7 @@ Response:
{
"index": "twitter",
"valid": true,
"explanation": "((user:terminator^3.71334 plot:future^2.763601 plot:human^2.8415773 plot:sarah^3.4193945 plot:kyle^3.8244398 plot:cyborg^3.9177752 plot:connor^4.040236 plot:reese^4.7133346 ... )~6) -ConstantScore(_uid:tweet#2)) #(ConstantScore(_type:tweet))^0.0"
"explanation": "((user:terminator^3.71334 plot:future^2.763601 plot:human^2.8415773 plot:sarah^3.4193945 plot:kyle^3.8244398 plot:cyborg^3.9177752 plot:connor^4.040236 plot:reese^4.7133346 ... )~6) -ConstantScore(_id:2)) #(ConstantScore(_type:tweet))^0.0"
}
]
}

View File

@ -127,7 +127,7 @@ public class AppendProcessorTests extends ESTestCase {
public void testAppendMetadataExceptVersion() throws Exception {
// here any metadata field value becomes a list, which won't make sense in most of the cases,
// but support for append is streamlined like for set so we test it
MetaData randomMetaData = randomFrom(MetaData.INDEX, MetaData.TYPE, MetaData.ID, MetaData.ROUTING, MetaData.PARENT);
MetaData randomMetaData = randomFrom(MetaData.INDEX, MetaData.TYPE, MetaData.ID, MetaData.ROUTING);
List<String> values = new ArrayList<>();
Processor appendProcessor;
if (randomBoolean()) {

View File

@ -38,7 +38,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
"events-", "y", "yyyyMMdd"
);
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z"));
processor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{20160425||/y{yyyyMMdd|UTC}}>"));
@ -48,7 +48,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
Function<String, DateTime> function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null);
DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", (randomBoolean() ? "@" : "") + "4000000050d506482dbdf024"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{20121222||/m{yyyyMMdd|UTC}}>"));
@ -58,12 +58,12 @@ public class DateIndexNameProcessorTests extends ESTestCase {
Function<String, DateTime> function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null);
DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000500"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", 1000500L));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));
@ -73,7 +73,7 @@ public class DateIndexNameProcessorTests extends ESTestCase {
Function<String, DateTime> function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null);
DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function),
DateTimeZone.UTC, "events-", "m", "yyyyMMdd");
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, null,
IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null,
Collections.singletonMap("_field", "1000.5"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo("<events-{19700101||/m{yyyyMMdd|UTC}}>"));

View File

@ -45,7 +45,7 @@ public class ForEachProcessorTests extends ESTestCase {
values.add("bar");
values.add("baz");
IngestDocument ingestDocument = new IngestDocument(
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values)
"_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values)
);
ForEachProcessor processor = new ForEachProcessor(
@ -61,7 +61,7 @@ public class ForEachProcessorTests extends ESTestCase {
public void testExecuteWithFailure() throws Exception {
IngestDocument ingestDocument = new IngestDocument(
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", Arrays.asList("a", "b", "c"))
"_index", "_type", "_id", null, null, null, Collections.singletonMap("values", Arrays.asList("a", "b", "c"))
);
TestProcessor testProcessor = new TestProcessor(id -> {
@ -101,7 +101,7 @@ public class ForEachProcessorTests extends ESTestCase {
values.add(new HashMap<>());
values.add(new HashMap<>());
IngestDocument ingestDocument = new IngestDocument(
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values)
"_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values)
);
TestProcessor innerProcessor = new TestProcessor(id -> {
@ -132,7 +132,7 @@ public class ForEachProcessorTests extends ESTestCase {
document.put("values", values);
document.put("flat_values", new ArrayList<>());
document.put("other", "value");
IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, null, document);
IngestDocument ingestDocument = new IngestDocument("_index", "_type", "_id", null, null, null, document);
ForEachProcessor processor = new ForEachProcessor(
"_tag", "values", new SetProcessor("_tag",
@ -171,7 +171,7 @@ public class ForEachProcessorTests extends ESTestCase {
values.add("");
}
IngestDocument ingestDocument = new IngestDocument(
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values)
"_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values)
);
ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor);
@ -190,7 +190,7 @@ public class ForEachProcessorTests extends ESTestCase {
values.add(1);
values.add(null);
IngestDocument ingestDocument = new IngestDocument(
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values", values)
"_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values)
);
TemplateScript.Factory template = new TestTemplateService.MockTemplateScript.Factory("errors");
@ -220,7 +220,7 @@ public class ForEachProcessorTests extends ESTestCase {
source.put("_value", "new_value");
source.put("values", values);
IngestDocument ingestDocument = new IngestDocument(
"_index", "_type", "_id", null, null, null, null, source
"_index", "_type", "_id", null, null, null, source
);
TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value",
@ -251,7 +251,7 @@ public class ForEachProcessorTests extends ESTestCase {
values.add(value);
IngestDocument ingestDocument = new IngestDocument(
"_index", "_type", "_id", null, null, null, null, Collections.singletonMap("values1", values)
"_index", "_type", "_id", null, null, null, Collections.singletonMap("values1", values)
);
TestProcessor testProcessor = new TestProcessor(

View File

@ -102,7 +102,7 @@ public class SetProcessorTests extends ESTestCase {
}
public void testSetMetadataExceptVersion() throws Exception {
MetaData randomMetaData = randomFrom(MetaData.INDEX, MetaData.TYPE, MetaData.ID, MetaData.ROUTING, MetaData.PARENT);
MetaData randomMetaData = randomFrom(MetaData.INDEX, MetaData.TYPE, MetaData.ID, MetaData.ROUTING);
Processor processor = createSetProcessor(randomMetaData.getFieldName(), "_value", true);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
processor.execute(ingestDocument);

View File

@ -82,7 +82,7 @@ public class MoreExpressionTests extends ESIntegTestCase {
SearchRequestBuilder req = client().prepareSearch().setIndices("test");
req.setQuery(QueryBuilders.matchAllQuery())
.addSort(SortBuilders.fieldSort("_uid")
.addSort(SortBuilders.fieldSort("_id")
.order(SortOrder.ASC))
.addScriptField("foo", new Script(ScriptType.INLINE, "expression", script, paramsMap));
return req;

View File

@ -39,6 +39,7 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.fielddata.AtomicNumericFieldData;
@ -157,10 +158,10 @@ public class ScaledFloatFieldMapper extends FieldMapper {
builder.nullValue(ScaledFloatFieldMapper.parse(propNode));
iterator.remove();
} else if (propName.equals("ignore_malformed")) {
builder.ignoreMalformed(TypeParsers.nodeBooleanValue(name, "ignore_malformed", propNode, parserContext));
builder.ignoreMalformed(XContentMapValues.nodeBooleanValue(propNode, name + ".ignore_malformed"));
iterator.remove();
} else if (propName.equals("coerce")) {
builder.coerce(TypeParsers.nodeBooleanValue(name, "coerce", propNode, parserContext));
builder.coerce(XContentMapValues.nodeBooleanValue(propNode, name + ".coerce"));
iterator.remove();
} else if (propName.equals("scaling_factor")) {
builder.scalingFactor(ScaledFloatFieldMapper.parse(propNode));

View File

@ -28,7 +28,6 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.join.mapper.ParentIdFieldMapper;
import org.elasticsearch.join.mapper.ParentJoinFieldMapper;
import org.elasticsearch.search.aggregations.AggregationBuilder;
@ -109,11 +108,7 @@ public class ChildrenAggregationBuilder
@Override
protected ValuesSourceConfig<WithOrdinals> resolveConfig(SearchContext context) {
ValuesSourceConfig<WithOrdinals> config = new ValuesSourceConfig<>(ValuesSourceType.BYTES);
if (context.mapperService().getIndexSettings().isSingleType()) {
joinFieldResolveConfig(context, config);
} else {
parentFieldResolveConfig(context, config);
}
return config;
}
@ -131,30 +126,6 @@ public class ChildrenAggregationBuilder
}
}
private void parentFieldResolveConfig(SearchContext context, ValuesSourceConfig<WithOrdinals> config) {
DocumentMapper childDocMapper = context.mapperService().documentMapper(childType);
if (childDocMapper != null) {
ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
if (!parentFieldMapper.active()) {
throw new IllegalArgumentException("[children] no [_parent] field not configured that points to a parent type");
}
String parentType = parentFieldMapper.type();
DocumentMapper parentDocMapper = context.mapperService().documentMapper(parentType);
if (parentDocMapper != null) {
parentFilter = parentDocMapper.typeFilter(context.getQueryShardContext());
childFilter = childDocMapper.typeFilter(context.getQueryShardContext());
MappedFieldType parentFieldType = parentDocMapper.parentFieldMapper().getParentJoinFieldType();
final SortedSetDVOrdinalsIndexFieldData fieldData = context.getForField(parentFieldType);
config.fieldContext(new FieldContext(parentFieldType.name(), fieldData,
parentFieldType));
} else {
config.unmapped(true);
}
} else {
config.unmapped(true);
}
}
@Override
protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException {
builder.field(ParentToChildrenAggregator.TYPE_FIELD.getPreferredName(), childType);

View File

@ -96,10 +96,6 @@ public final class ParentJoinFieldMapper extends FieldMapper {
throw new IllegalStateException("cannot create join field [" + name + "] " +
"for the partitioned index " + "[" + settings.getIndex().getName() + "]");
}
if (settings.isSingleType() == false) {
throw new IllegalStateException("cannot create join field [" + name + "] " +
"on multi-types index [" + settings.getIndex().getName() + "]");
}
}
private static void checkObjectOrNested(ContentPath path, String name) {

View File

@ -37,9 +37,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
@ -306,14 +304,6 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
if (context.getIndexSettings().isSingleType()) {
return joinFieldDoToQuery(context);
} else {
return parentFieldDoToQuery(context);
}
}
private Query joinFieldDoToQuery(QueryShardContext context) throws IOException {
ParentJoinFieldMapper joinFieldMapper = ParentJoinFieldMapper.getMapper(context.getMapperService());
if (joinFieldMapper == null) {
if (ignoreUnmapped) {
@ -342,44 +332,6 @@ public class HasChildQueryBuilder extends AbstractQueryBuilder<HasChildQueryBuil
}
}
private Query parentFieldDoToQuery(QueryShardContext context) throws IOException {
Query innerQuery;
final String[] previousTypes = context.getTypes();
context.setTypes(type);
try {
innerQuery = query.toQuery(context);
} finally {
context.setTypes(previousTypes);
}
DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
if (childDocMapper == null) {
if (ignoreUnmapped) {
return new MatchNoDocsQuery();
} else {
throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
}
}
ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
if (parentFieldMapper.active() == false) {
throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
}
String parentType = parentFieldMapper.type();
DocumentMapper parentDocMapper = context.getMapperService().documentMapper(parentType);
if (parentDocMapper == null) {
throw new QueryShardException(context,
"[" + NAME + "] Type [" + type + "] points to a non existent parent type [" + parentType + "]");
}
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, childDocMapper.typeFilter(context));
String joinField = ParentFieldMapper.joinField(parentType);
final MappedFieldType parentFieldType = parentDocMapper.parentFieldMapper().getParentJoinFieldType();
final SortedSetDVOrdinalsIndexFieldData fieldData = context.getForField(parentFieldType);
return new LateParsingQuery(parentDocMapper.typeFilter(context), innerQuery, minChildren(), maxChildren(),
joinField, scoreMode, fieldData, context.getSearchSimilarity());
}
/**
* A query that rewrites into another query using
* {@link JoinUtil#createJoinQuery(String, Query, Query, IndexSearcher, ScoreMode, OrdinalMap, int, int)}

View File

@ -18,8 +18,6 @@
*/
package org.elasticsearch.join.query;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
@ -32,9 +30,7 @@ import org.elasticsearch.common.lucene.search.Queries;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.fielddata.plain.SortedSetDVOrdinalsIndexFieldData;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
@ -47,10 +43,8 @@ import org.elasticsearch.join.mapper.ParentJoinFieldMapper;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* Builder for the 'has_parent' query.
@ -173,14 +167,6 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
if (context.getIndexSettings().isSingleType()) {
return joinFieldDoToQuery(context);
} else {
return parentFieldDoToQuery(context);
}
}
private Query joinFieldDoToQuery(QueryShardContext context) throws IOException {
ParentJoinFieldMapper joinFieldMapper = ParentJoinFieldMapper.getMapper(context.getMapperService());
if (joinFieldMapper == null) {
if (ignoreUnmapped) {
@ -210,65 +196,6 @@ public class HasParentQueryBuilder extends AbstractQueryBuilder<HasParentQueryBu
}
}
private Query parentFieldDoToQuery(QueryShardContext context) throws IOException {
Query innerQuery;
String[] previousTypes = context.getTypes();
context.setTypes(type);
try {
innerQuery = query.toQuery(context);
} finally {
context.setTypes(previousTypes);
}
DocumentMapper parentDocMapper = context.documentMapper(type);
if (parentDocMapper == null) {
if (ignoreUnmapped) {
return new MatchNoDocsQuery();
} else {
throw new QueryShardException(context,
"[" + NAME + "] query configured 'parent_type' [" + type + "] is not a valid type");
}
}
Set<String> childTypes = new HashSet<>();
for (DocumentMapper documentMapper : context.getMapperService().docMappers(false)) {
ParentFieldMapper parentFieldMapper = documentMapper.parentFieldMapper();
if (parentFieldMapper.active() && type.equals(parentFieldMapper.type())) {
childTypes.add(documentMapper.type());
}
}
if (childTypes.isEmpty()) {
throw new QueryShardException(context, "[" + NAME + "] no child types found for type [" + type + "]");
}
Query childrenQuery;
if (childTypes.size() == 1) {
DocumentMapper documentMapper = context.getMapperService().documentMapper(childTypes.iterator().next());
childrenQuery = documentMapper.typeFilter(context);
} else {
BooleanQuery.Builder childrenFilter = new BooleanQuery.Builder();
for (String childrenTypeStr : childTypes) {
DocumentMapper documentMapper = context.getMapperService().documentMapper(childrenTypeStr);
childrenFilter.add(documentMapper.typeFilter(context), BooleanClause.Occur.SHOULD);
}
childrenQuery = childrenFilter.build();
}
// wrap the query with type query
innerQuery = Queries.filtered(innerQuery, parentDocMapper.typeFilter(context));
final MappedFieldType parentType = parentDocMapper.parentFieldMapper().getParentJoinFieldType();
final SortedSetDVOrdinalsIndexFieldData fieldData = context.getForField(parentType);
return new HasChildQueryBuilder.LateParsingQuery(childrenQuery,
innerQuery,
HasChildQueryBuilder.DEFAULT_MIN_CHILDREN,
HasChildQueryBuilder.DEFAULT_MAX_CHILDREN,
ParentFieldMapper.joinField(type),
score ? ScoreMode.Max : ScoreMode.None,
fieldData,
context.getSearchSimilarity());
}
@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);

View File

@ -21,13 +21,9 @@ package org.elasticsearch.join.query;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopDocsCollector;
import org.apache.lucene.search.TopFieldCollector;
@ -36,12 +32,8 @@ import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.search.Weight;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
import org.elasticsearch.index.query.QueryBuilder;
@ -70,15 +62,7 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
}
@Override
protected void doBuild(SearchContext parentSearchContext, InnerHitsContext innerHitsContext) throws IOException {
if (parentSearchContext.mapperService().getIndexSettings().isSingleType()) {
handleJoinFieldInnerHits(parentSearchContext, innerHitsContext);
} else {
handleParentFieldInnerHits(parentSearchContext, innerHitsContext);
}
}
private void handleJoinFieldInnerHits(SearchContext context, InnerHitsContext innerHitsContext) throws IOException {
protected void doBuild(SearchContext context, InnerHitsContext innerHitsContext) throws IOException {
QueryShardContext queryShardContext = context.getQueryShardContext();
ParentJoinFieldMapper joinFieldMapper = ParentJoinFieldMapper.getMapper(context.mapperService());
if (joinFieldMapper != null) {
@ -94,24 +78,6 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
}
}
private void handleParentFieldInnerHits(SearchContext context, InnerHitsContext innerHitsContext) throws IOException {
QueryShardContext queryShardContext = context.getQueryShardContext();
DocumentMapper documentMapper = queryShardContext.documentMapper(typeName);
if (documentMapper == null) {
if (innerHitBuilder.isIgnoreUnmapped() == false) {
throw new IllegalStateException("[" + query.getName() + "] no mapping found for type [" + typeName + "]");
} else {
return;
}
}
String name = innerHitBuilder.getName() != null ? innerHitBuilder.getName() : documentMapper.type();
ParentChildInnerHitSubContext parentChildInnerHits = new ParentChildInnerHitSubContext(
name, context, queryShardContext.getMapperService(), documentMapper
);
setupInnerHitsContext(queryShardContext, parentChildInnerHits);
innerHitsContext.addInnerHitDefinition(parentChildInnerHits);
}
static final class JoinFieldInnerHitSubContext extends InnerHitsContext.InnerHitSubContext {
private final String typeName;
private final boolean fetchChildInnerHits;
@ -206,85 +172,4 @@ class ParentChildInnerHitContextBuilder extends InnerHitContextBuilder {
}
static final class ParentChildInnerHitSubContext extends InnerHitsContext.InnerHitSubContext {
private final MapperService mapperService;
private final DocumentMapper documentMapper;
ParentChildInnerHitSubContext(String name, SearchContext context, MapperService mapperService, DocumentMapper documentMapper) {
super(name, context);
this.mapperService = mapperService;
this.documentMapper = documentMapper;
}
@Override
public TopDocs[] topDocs(SearchHit[] hits) throws IOException {
Weight innerHitQueryWeight = createInnerHitQueryWeight();
TopDocs[] result = new TopDocs[hits.length];
for (int i = 0; i < hits.length; i++) {
SearchHit hit = hits[i];
final Query hitQuery;
if (isParentHit(hit)) {
String field = ParentFieldMapper.joinField(hit.getType());
hitQuery = new DocValuesTermsQuery(field, hit.getId());
} else if (isChildHit(hit)) {
DocumentMapper hitDocumentMapper = mapperService.documentMapper(hit.getType());
final String parentType = hitDocumentMapper.parentFieldMapper().type();
DocumentField parentField = hit.field(ParentFieldMapper.NAME);
if (parentField == null) {
throw new IllegalStateException("All children must have a _parent");
}
Term uidTerm = context.mapperService().createUidTerm(parentType, parentField.getValue());
if (uidTerm == null) {
hitQuery = new MatchNoDocsQuery("Missing type: " + parentType);
} else {
hitQuery = new TermQuery(uidTerm);
}
} else {
result[i] = Lucene.EMPTY_TOP_DOCS;
continue;
}
BooleanQuery q = new BooleanQuery.Builder()
// Only include docs that have the current hit as parent
.add(hitQuery, BooleanClause.Occur.FILTER)
// Only include docs that have this inner hits type
.add(documentMapper.typeFilter(context.getQueryShardContext()), BooleanClause.Occur.FILTER)
.build();
Weight weight = context.searcher().createNormalizedWeight(q, false);
if (size() == 0) {
TotalHitCountCollector totalHitCountCollector = new TotalHitCountCollector();
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, totalHitCountCollector, ctx);
}
result[i] = new TopDocs(totalHitCountCollector.getTotalHits(), Lucene.EMPTY_SCORE_DOCS, 0);
} else {
int topN = Math.min(from() + size(), context.searcher().getIndexReader().maxDoc());
TopDocsCollector<?> topDocsCollector;
if (sort() != null) {
topDocsCollector = TopFieldCollector.create(sort().sort, topN, true, trackScores(), trackScores(), true);
} else {
topDocsCollector = TopScoreDocCollector.create(topN);
}
try {
for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) {
intersect(weight, innerHitQueryWeight, topDocsCollector, ctx);
}
} finally {
clearReleasables(Lifetime.COLLECTION);
}
result[i] = topDocsCollector.topDocs(from(), size());
}
}
return result;
}
private boolean isParentHit(SearchHit hit) {
return hit.getType().equals(documentMapper.parentFieldMapper().type());
}
private boolean isChildHit(SearchHit hit) {
DocumentMapper hitDocumentMapper = mapperService.documentMapper(hit.getType());
return documentMapper.type().equals(hitDocumentMapper.parentFieldMapper().type());
}
}
}

View File

@ -19,22 +19,16 @@
package org.elasticsearch.join.query;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
@ -159,11 +153,6 @@ public final class ParentIdQueryBuilder extends AbstractQueryBuilder<ParentIdQue
@Override
protected Query doToQuery(QueryShardContext context) throws IOException {
if (context.getIndexSettings().isSingleType() == false) {
// BWC for indices with multiple types
return doToQueryBWC(context);
}
ParentJoinFieldMapper joinFieldMapper = ParentJoinFieldMapper.getMapper(context.getMapperService());
if (joinFieldMapper == null) {
if (ignoreUnmapped) {
@ -188,32 +177,6 @@ public final class ParentIdQueryBuilder extends AbstractQueryBuilder<ParentIdQue
.build();
}
/**
* Creates parent_id query from a {@link ParentFieldMapper}
* Only used for BWC with multi-types indices
*/
private Query doToQueryBWC(QueryShardContext context) throws IOException {
DocumentMapper childDocMapper = context.getMapperService().documentMapper(type);
if (childDocMapper == null) {
if (ignoreUnmapped) {
return new MatchNoDocsQuery();
} else {
throw new QueryShardException(context, "[" + NAME + "] no mapping found for type [" + type + "]");
}
}
ParentFieldMapper parentFieldMapper = childDocMapper.parentFieldMapper();
if (parentFieldMapper.active() == false) {
throw new QueryShardException(context, "[" + NAME + "] _parent field has no parent type configured");
}
String fieldName = ParentFieldMapper.joinField(parentFieldMapper.type());
return new BooleanQuery.Builder()
.add(new DocValuesTermsQuery(fieldName, id), BooleanClause.Occur.MUST)
// Need to take child type into account, otherwise a child doc of different type with the same id could match
.add(new TermQuery(new Term(TypeFieldMapper.NAME, type)), BooleanClause.Occur.FILTER)
.build();
}
@Override
protected boolean doEquals(ParentIdQueryBuilder that) {
return Objects.equals(type, that.type)

View File

@ -19,7 +19,6 @@
package org.elasticsearch.join.aggregations;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateResponse;
@ -68,20 +67,12 @@ public class ChildrenIT extends ParentChildTestCase {
@Before
public void setupCluster() throws Exception {
categoryToControl.clear();
if (legacy()) {
assertAcked(
prepareCreate("test")
.addMapping("article", "category", "type=keyword")
.addMapping("comment", "_parent", "type=article", "commenter", "type=keyword")
);
} else {
assertAcked(
prepareCreate("test")
.addMapping("doc",
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "article", "comment"),
"commenter", "keyword", "category", "keyword"))
);
}
List<IndexRequestBuilder> requests = new ArrayList<>();
String[] uniqueCategories = new String[randomIntBetween(1, 25)];
@ -189,7 +180,7 @@ public class ChildrenIT extends ParentChildTestCase {
.setQuery(matchQuery("randomized", false))
.addAggregation(
terms("category").field("category").size(10000).subAggregation(
children("to_comment", "comment").subAggregation(topHits("top_comments").sort("_uid", SortOrder.ASC))
children("to_comment", "comment").subAggregation(topHits("top_comments").sort("_id", SortOrder.ASC))
)
).get();
assertSearchResponse(searchResponse);
@ -244,20 +235,12 @@ public class ChildrenIT extends ParentChildTestCase {
public void testWithDeletes() throws Exception {
String indexName = "xyz";
if (legacy()) {
assertAcked(
prepareCreate(indexName)
.addMapping("parent")
.addMapping("child", "_parent", "type=parent", "count", "type=long")
);
} else {
assertAcked(
prepareCreate(indexName)
.addMapping("doc",
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"),
"name", "keyword"))
);
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest(indexName, "parent", "1", null));
@ -286,19 +269,11 @@ public class ChildrenIT extends ParentChildTestCase {
* the updates cause that.
*/
UpdateResponse updateResponse;
if (legacy()) {
updateResponse = client().prepareUpdate(indexName, "child", idToUpdate)
.setParent("1")
.setDoc(Requests.INDEX_CONTENT_TYPE, "count", 1)
.setDetectNoop(false)
.get();
} else {
updateResponse = client().prepareUpdate(indexName, "doc", idToUpdate)
.setRouting("1")
.setDoc(Requests.INDEX_CONTENT_TYPE, "count", 1)
.setDetectNoop(false)
.get();
}
assertThat(updateResponse.getVersion(), greaterThan(1L));
refresh();
}
@ -320,16 +295,6 @@ public class ChildrenIT extends ParentChildTestCase {
String indexName = "prodcatalog";
String masterType = "masterprod";
String childType = "variantsku";
if (legacy()) {
assertAcked(
prepareCreate(indexName)
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
.put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)
.put("index.version.created", Version.V_5_6_0)) // multi type
.addMapping(masterType, "brand", "type=text", "name", "type=keyword", "material", "type=text")
.addMapping(childType, "_parent", "type=masterprod", "color", "type=keyword", "size", "type=keyword")
);
} else {
assertAcked(
prepareCreate(indexName)
.setSettings(Settings.builder().put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
@ -339,7 +304,6 @@ public class ChildrenIT extends ParentChildTestCase {
masterType, childType),
"brand", "text", "name", "keyword", "material", "text", "color", "keyword", "size", "keyword"))
);
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest(indexName, masterType, "1", null, "brand", "Levis", "name",
@ -396,17 +360,6 @@ public class ChildrenIT extends ParentChildTestCase {
String grandParentType = "continent";
String parentType = "country";
String childType = "city";
if (legacy()) {
assertAcked(
prepareCreate(indexName)
.setSettings(Settings.builder()
.put("index.version.created", Version.V_5_6_0) // multi type
).addMapping(grandParentType, "name", "type=keyword")
.addMapping(parentType, "_parent", "type=" + grandParentType)
.addMapping(childType, "_parent", "type=" + parentType)
);
} else {
assertAcked(
prepareCreate(indexName)
.addMapping("doc",
@ -414,7 +367,6 @@ public class ChildrenIT extends ParentChildTestCase {
grandParentType, parentType, parentType, childType),
"name", "keyword"))
);
}
createIndexRequest(indexName, grandParentType, "1", null, "name", "europe").get();
createIndexRequest(indexName, parentType, "2", "1", "name", "belgium").get();
@ -451,13 +403,6 @@ public class ChildrenIT extends ParentChildTestCase {
// Before we only evaluated segments that yielded matches in 'towns' and 'parent_names' aggs, which caused
// us to miss to evaluate child docs in segments we didn't have parent matches for.
if (legacy()) {
assertAcked(
prepareCreate("index")
.addMapping("parentType", "name", "type=keyword", "town", "type=keyword")
.addMapping("childType", "_parent", "type=parentType", "name", "type=keyword", "age", "type=integer")
);
} else {
assertAcked(
prepareCreate("index")
.addMapping("doc",
@ -465,7 +410,6 @@ public class ChildrenIT extends ParentChildTestCase {
"parentType", "childType"),
"name", "keyword", "town", "keyword", "age", "integer"))
);
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("index", "parentType", "1", null, "name", "Bob", "town", "Memphis"));
requests.add(createIndexRequest("index", "parentType", "2", null, "name", "Alice", "town", "Chicago"));

View File

@ -1,27 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.aggregations;
public class LegacyChildrenIT extends ChildrenIT {
@Override
protected boolean legacy() {
return true;
}
}

View File

@ -39,12 +39,12 @@ import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.mapper.ContentPath;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.join.mapper.MetaJoinFieldMapper;
import org.elasticsearch.join.mapper.ParentJoinFieldMapper;
@ -109,7 +109,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
});
for (String parent : expectedParentChildRelations.keySet()) {
testCase(new TermInSetQuery(UidFieldMapper.NAME, new BytesRef(Uid.createUid(PARENT_TYPE, parent))), indexSearcher, child -> {
testCase(new TermInSetQuery(IdFieldMapper.NAME, Uid.encodeId(parent)), indexSearcher, child -> {
assertEquals((long) expectedParentChildRelations.get(parent).v1(), child.getDocCount());
assertEquals(expectedParentChildRelations.get(parent).v2(),
((InternalMin) child.getAggregations().get("in_child")).getValue(), Double.MIN_VALUE);
@ -139,7 +139,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
private static List<Field> createParentDocument(String id) {
return Arrays.asList(
new StringField(UidFieldMapper.NAME, Uid.createUid(PARENT_TYPE, id), Field.Store.NO),
new StringField(IdFieldMapper.NAME, Uid.encodeId(id), Field.Store.NO),
new StringField("join_field", PARENT_TYPE, Field.Store.NO),
createJoinField(PARENT_TYPE, id)
);
@ -147,7 +147,7 @@ public class ParentToChildrenAggregatorTests extends AggregatorTestCase {
private static List<Field> createChildDocument(String childId, String parentId, int value) {
return Arrays.asList(
new StringField(UidFieldMapper.NAME, Uid.createUid(CHILD_TYPE, childId), Field.Store.NO),
new StringField(IdFieldMapper.NAME, Uid.encodeId(childId), Field.Store.NO),
new StringField("join_field", CHILD_TYPE, Field.Store.NO),
createJoinField(PARENT_TYPE, parentId),
new SortedNumericDocValuesField("number", value)

View File

@ -69,7 +69,6 @@ import static org.elasticsearch.index.query.QueryBuilders.matchQuery;
import static org.elasticsearch.index.query.QueryBuilders.prefixQuery;
import static org.elasticsearch.index.query.QueryBuilders.queryStringQuery;
import static org.elasticsearch.index.query.QueryBuilders.termQuery;
import static org.elasticsearch.index.query.QueryBuilders.termsQuery;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.fieldValueFactorFunction;
import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.weightFactorFunction;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
@ -88,23 +87,10 @@ import static org.hamcrest.Matchers.is;
public class ChildQuerySearchIT extends ParentChildTestCase {
public void testSelfReferentialIsForbidden() {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
prepareCreate("test").addMapping("type", "_parent", "type=type").get());
assertThat(e.getMessage(), equalTo("The [_parent.type] option can't point to the same type"));
}
public void testMultiLevelChild() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent")
.addMapping("grandchild", "_parent", "type=child"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true,
"parent", "child", "child", "grandchild")));
}
ensureGreen();
createIndexRequest("test", "parent", "p1", null, "p_field", "p_value1").get();
@ -159,14 +145,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// see #2744
public void test2744() throws IOException {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("foo")
.addMapping("test", "_parent", "type=foo"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "foo", "test")));
}
ensureGreen();
// index simple data
@ -183,14 +163,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testSimpleChildQuery() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data
@ -204,14 +178,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// TEST FETCHING _parent from child
SearchResponse searchResponse;
if (legacy()) {
searchResponse = client().prepareSearch("test")
.setQuery(idsQuery("child").addIds("c1")).storedFields("_parent").get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(1L));
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
assertThat(searchResponse.getHits().getAt(0).field("_parent").getValue(), equalTo("p1"));
} else {
searchResponse = client().prepareSearch("test")
.setQuery(idsQuery("doc").addIds("c1")).get();
assertNoFailures(searchResponse);
@ -219,18 +185,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
assertThat(searchResponse.getHits().getAt(0).getId(), equalTo("c1"));
assertThat(extractValue("join_field.name", searchResponse.getHits().getAt(0).getSourceAsMap()), equalTo("child"));
assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(0).getSourceAsMap()), equalTo("p1"));
}
// TEST matching on parent
if (legacy()) {
searchResponse = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1")).storedFields("_parent").get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(searchResponse.getHits().getAt(0).field("_parent").getValue(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(searchResponse.getHits().getAt(1).field("_parent").getValue(), equalTo("p1"));
} else {
searchResponse = client().prepareSearch("test")
.setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
.get();
@ -242,19 +198,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(extractValue("join_field.name", searchResponse.getHits().getAt(1).getSourceAsMap()), equalTo("child"));
assertThat(extractValue("join_field.parent", searchResponse.getHits().getAt(1).getSourceAsMap()), equalTo("p1"));
}
if (legacy()) {
searchResponse = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).storedFields("_parent").get();
assertNoFailures(searchResponse);
assertThat(searchResponse.getHits().getTotalHits(), equalTo(2L));
assertThat(searchResponse.getHits().getAt(0).getId(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(searchResponse.getHits().getAt(0).field("_parent").getValue(), equalTo("p1"));
assertThat(searchResponse.getHits().getAt(1).getId(), anyOf(equalTo("c1"), equalTo("c2")));
assertThat(searchResponse.getHits().getAt(1).field("_parent").getValue(), equalTo("p1"));
} else {
// doesn't make sense for join field, because query string & term query on this field have no special logic.
}
// HAS CHILD
searchResponse = client().prepareSearch("test").setQuery(randomHasChild("child", "c_field", "yellow"))
@ -290,14 +233,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// Issue #3290
public void testCachingBugWithFqueryFilter() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
List<IndexRequestBuilder> builders = new ArrayList<>();
// index simple data
@ -334,14 +271,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasParentFilter() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
Map<String, Set<String>> parentToChildren = new HashMap<>();
// Childless parent
@ -388,14 +319,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testSimpleChildQueryWithFlush() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data with flushes, so we have many segments
@ -462,16 +387,10 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testScopedFacet() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent", "c_field", "type=keyword"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc",
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"),
"c_field", "keyword")));
}
ensureGreen();
// index simple data
@ -508,14 +427,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testDeletedParent() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data
createIndexRequest("test", "parent", "p1", null, "p_field", "p_value1").get();
@ -548,14 +461,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testDfsSearchType() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data
@ -581,20 +488,14 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasChildAndHasParentFailWhenSomeSegmentsDontContainAnyParentOrChildDocs() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
createIndexRequest("test", "parent", "1", null, "p_field", 1).get();
createIndexRequest("test", "child", "2", "1", "c_field", 1).get();
client().prepareIndex("test", legacy() ? "type1" : "doc", "3").setSource("p_field", 1).get();
client().prepareIndex("test", "doc", "3").setSource("p_field", 1).get();
refresh();
SearchResponse searchResponse = client().prepareSearch("test")
@ -609,14 +510,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testCountApiUsage() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
String parentId = "p1";
@ -646,14 +541,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testExplainUsage() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
String parentId = "p1";
@ -675,7 +564,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
assertHitCount(searchResponse, 1L);
assertThat(searchResponse.getHits().getAt(0).getExplanation().getDescription(), containsString("join value p1"));
ExplainResponse explainResponse = client().prepareExplain("test", legacy() ? "parent" : "doc", parentId)
ExplainResponse explainResponse = client().prepareExplain("test", "doc", parentId)
.setQuery(hasChildQuery("child", termQuery("c_field", "1"), ScoreMode.Max))
.get();
assertThat(explainResponse.isExists(), equalTo(true));
@ -716,12 +605,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testScoreForParentChildQueriesWithFunctionScore() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent")
.addMapping("child1", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("join_field")
@ -732,7 +615,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
.endObject()
.endObject().endObject().endObject()
));
}
ensureGreen();
indexRandom(true, createDocBuilders().toArray(new IndexRequestBuilder[0]));
@ -816,14 +698,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// Issue #2536
public void testParentChildQueriesCanHandleNoRelevantTypesInIndex() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
SearchResponse response = client().prepareSearch("test")
@ -831,13 +707,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
assertNoFailures(response);
assertThat(response.getHits().getTotalHits(), equalTo(0L));
if (legacy()) {
client().prepareIndex("test", "child1").setSource(jsonBuilder().startObject().field("text", "value").endObject())
.setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
} else {
client().prepareIndex("test", "doc").setSource(jsonBuilder().startObject().field("text", "value").endObject())
.setRefreshPolicy(RefreshPolicy.IMMEDIATE).get();
}
response = client().prepareSearch("test")
.setQuery(hasChildQuery("child", matchQuery("text", "value"), ScoreMode.None)).get();
@ -861,25 +732,15 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasChildAndHasParentFilter_withFilter() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
createIndexRequest("test", "parent", "1", null, "p_field", 1).get();
createIndexRequest("test", "child", "2", "1", "c_field", 1).get();
client().admin().indices().prepareFlush("test").get();
if (legacy()) {
client().prepareIndex("test", "type1", "3").setSource("p_field", 2).get();
} else {
client().prepareIndex("test", "doc", "3").setSource("p_field", 2).get();
}
refresh();
SearchResponse searchResponse = client().prepareSearch("test")
@ -898,14 +759,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasChildInnerHitsHighlighting() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
createIndexRequest("test", "parent", "1", null, "p_field", 1).get();
@ -928,14 +783,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasChildAndHasParentWrappedInAQueryFilter() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// query filter in case for p/c shouldn't execute per segment, but rather
@ -966,15 +815,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testSimpleQueryRewrite() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent", "p_field", "type=keyword")
.addMapping("child", "_parent", "type=parent", "c_field", "type=keyword"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"),
"c_field", "keyword", "p_field", "keyword")));
}
ensureGreen();
// index simple data
@ -1020,14 +863,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// Issue #3144
public void testReIndexingParentAndChildDocuments() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data
@ -1087,14 +924,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// Issue #3203
public void testHasChildQueryWithMinimumScore() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data
@ -1117,90 +948,42 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testParentFieldQuery() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.refresh_interval", -1)
)
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder().put("index.refresh_interval", -1))
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
SearchResponse response;
if (legacy()){
response = client().prepareSearch("test").setQuery(termQuery("_parent#parent:p1", "p1"))
.get();
} else {
response = client().prepareSearch("test")
SearchResponse response = client().prepareSearch("test")
.setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
.get();
}
assertHitCount(response, 0L);
createIndexRequest("test", "child", "c1", "p1").get();
refresh();
if (legacy()){
response = client().prepareSearch("test").setQuery(termQuery("_parent#parent", "p1"))
.get();
} else {
response = client().prepareSearch("test")
.setQuery(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
.get();
}
assertHitCount(response, 1L);
if (legacy()) {
response = client().prepareSearch("test").setQuery(queryStringQuery("_parent#parent:p1")).get();
assertHitCount(response, 1L);
}
createIndexRequest("test", "child", "c2", "p2").get();
refresh();
if (legacy()) {
response = client().prepareSearch("test").setQuery(termsQuery("_parent#parent", "p1", "p2")).get();
assertHitCount(response, 2L);
}
if (legacy()) {
response = client().prepareSearch("test")
.setQuery(boolQuery()
.should(termQuery("_parent#parent", "p1"))
.should(termQuery("_parent#parent", "p2"))
).get();
} else {
response = client().prepareSearch("test")
.setQuery(boolQuery()
.should(boolQuery().filter(termQuery("join_field#parent", "p1")).filter(termQuery("join_field", "child")))
.should(boolQuery().filter(termQuery("join_field#parent", "p2")).filter(termQuery("join_field", "child")))
).get();
}
assertHitCount(response, 2L);
}
public void testParentIdQuery() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.refresh_interval", -1)
)
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.refresh_interval", -1)
)
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
createIndexRequest("test", "child", "c1", "p1").get();
@ -1221,14 +1004,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasChildNotBeingCached() throws IOException {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data
@ -1288,13 +1065,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
// Issue #3818
public void testHasChildQueryOnlyReturnsSingleChildType() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("grandissue")
.addMapping("grandparent", "name", "type=text")
.addMapping("parent", "_parent", "type=grandparent")
.addMapping("child_type_one", "_parent", "type=parent")
.addMapping("child_type_two", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("grandissue")
.addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("join_field")
@ -1306,7 +1076,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
.endObject()
.endObject().endObject().endObject()
));
}
createIndexRequest("grandissue", "grandparent", "1", null, "name", "Grandpa").get();
createIndexRequest("grandissue", "parent", "2", "1", "name", "Dana").get();
@ -1350,16 +1119,10 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasChildQueryWithNestedInnerObjects() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent", "objects", "type=nested")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc",
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child"),
"objects", "nested")));
}
ensureGreen();
createIndexRequest("test", "parent", "p1", null, jsonBuilder().startObject().field("p_field", "1").startArray("objects")
@ -1398,14 +1161,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testNamedFilters() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
String parentId = "p1";
@ -1450,7 +1207,7 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
ensureGreen();
String parentId = "p1";
client().prepareIndex("test", legacy() ? "parent" : "doc", parentId).setSource("p_field", "1").get();
client().prepareIndex("test", "doc", parentId).setSource("p_field", "1").get();
refresh();
try {
@ -1500,19 +1257,9 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testParentChildCaching() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.refresh_interval", -1)
)
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.setSettings(Settings.builder().put("index.refresh_interval", -1))
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
// index simple data
@ -1553,14 +1300,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testParentChildQueriesViaScrollApi() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
for (int i = 0; i < 10; i++) {
createIndexRequest("test", "parent", "p" + i, null).get();
@ -1600,44 +1341,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
}
public void testTypeIsAppliedInHasParentInnerQuery() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
List<IndexRequestBuilder> indexRequests = new ArrayList<>();
indexRequests.add(createIndexRequest("test", "parent", "p1", null, "field1", "a"));
indexRequests.add(createIndexRequest("test", "child", "c1", "p1"));
indexRequests.add(createIndexRequest("test", "child", "c2", "p1"));
indexRandom(true, indexRequests);
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasParentQuery("parent", boolQuery().mustNot(termQuery("field1", "a")), false)))
.get();
assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", constantScoreQuery(boolQuery().mustNot(termQuery("field1", "a"))), false))
.get();
assertHitCount(searchResponse, 0L);
searchResponse = client().prepareSearch("test")
.setQuery(constantScoreQuery(hasParentQuery("parent", termQuery("field1", "a"), false)))
.get();
assertHitCount(searchResponse, 2L);
searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", constantScoreQuery(termQuery("field1", "a")), false))
.get();
assertHitCount(searchResponse, 2L);
}
private List<IndexRequestBuilder> createMinMaxDocBuilders() {
List<IndexRequestBuilder> indexBuilders = new ArrayList<>();
// Parent 1 and its children
@ -1685,14 +1388,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testMinMaxChildren() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent", "id", "type=long")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
ensureGreen();
indexRandom(true, createMinMaxDocBuilders().toArray(new IndexRequestBuilder[0]));
@ -2004,13 +1701,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHasParentInnerQueryType() {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent-type").addMapping("child-type", "_parent", "type=parent-type"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent-type", "child-type")));
}
createIndexRequest("test", "child-type", "child-id", "parent-id").get();
createIndexRequest("test", "parent-type", "parent-id", null).get();
refresh();
@ -2026,12 +1718,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testHighlightersIgnoreParentChild() throws IOException {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("parent-type", "searchText", "type=text,term_vector=with_positions_offsets,index_options=offsets")
.addMapping("child-type", "_parent", "type=parent-type", "searchText",
"type=text,term_vector=with_positions_offsets,index_options=offsets"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", jsonBuilder().startObject().startObject("properties")
.startObject("join_field")
@ -2047,7 +1733,6 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
.endObject()
.endObject().endObject()
));
}
createIndexRequest("test", "parent-type", "parent-id", null, "searchText", "quick brown fox").get();
createIndexRequest("test", "child-type", "child-id", "parent-id", "searchText", "quick brown fox").get();
refresh();
@ -2082,15 +1767,8 @@ public class ChildQuerySearchIT extends ParentChildTestCase {
}
public void testAliasesFilterWithHasChildQuery() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("my-index")
.addMapping("parent")
.addMapping("child", "_parent", "type=parent")
);
} else {
assertAcked(prepareCreate("my-index")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
createIndexRequest("my-index", "parent", "1", null).get();
createIndexRequest("my-index", "child", "2", "1").get();
refresh();


@ -86,12 +86,6 @@ public class InnerHitsIT extends ParentChildTestCase {
}
public void testSimpleParentChild() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("articles")
.addMapping("article", "title", "type=text")
.addMapping("comment", "_parent", "type=article", "message", "type=text,fielddata=true")
);
} else {
assertAcked(prepareCreate("articles")
.addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("join_field")
@ -109,7 +103,6 @@ public class InnerHitsIT extends ParentChildTestCase {
.endObject()
.endObject().endObject().endObject()
));
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("articles", "article", "p1", null, "title", "quick brown fox"));
@ -136,9 +129,9 @@ public class InnerHitsIT extends ParentChildTestCase {
assertThat(innerHits.getTotalHits(), equalTo(2L));
assertThat(innerHits.getAt(0).getId(), equalTo("c1"));
assertThat(innerHits.getAt(0).getType(), equalTo(legacy() ? "comment" : "doc"));
assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
assertThat(innerHits.getAt(1).getId(), equalTo("c2"));
assertThat(innerHits.getAt(1).getType(), equalTo(legacy() ? "comment" : "doc"));
assertThat(innerHits.getAt(1).getType(), equalTo("doc"));
response = client().prepareSearch("articles")
.setQuery(hasChildQuery("comment", matchQuery("message", "elephant"), ScoreMode.None)
@ -153,11 +146,11 @@ public class InnerHitsIT extends ParentChildTestCase {
assertThat(innerHits.getTotalHits(), equalTo(3L));
assertThat(innerHits.getAt(0).getId(), equalTo("c4"));
assertThat(innerHits.getAt(0).getType(), equalTo(legacy() ? "comment" : "doc"));
assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
assertThat(innerHits.getAt(1).getId(), equalTo("c5"));
assertThat(innerHits.getAt(1).getType(), equalTo(legacy() ? "comment" : "doc"));
assertThat(innerHits.getAt(1).getType(), equalTo("doc"));
assertThat(innerHits.getAt(2).getId(), equalTo("c6"));
assertThat(innerHits.getAt(2).getType(), equalTo(legacy() ? "comment" : "doc"));
assertThat(innerHits.getAt(2).getType(), equalTo("doc"));
response = client().prepareSearch("articles")
.setQuery(
@ -179,13 +172,6 @@ public class InnerHitsIT extends ParentChildTestCase {
}
public void testRandomParentChild() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("idx")
.addMapping("parent")
.addMapping("child1", "_parent", "type=parent")
.addMapping("child2", "_parent", "type=parent")
);
} else {
assertAcked(prepareCreate("idx")
.addMapping("doc", jsonBuilder().startObject().startObject("doc").startObject("properties")
.startObject("join_field")
@ -196,7 +182,6 @@ public class InnerHitsIT extends ParentChildTestCase {
.endObject()
.endObject().endObject().endObject()
));
}
int numDocs = scaledRandomIntBetween(5, 50);
List<IndexRequestBuilder> requestBuilders = new ArrayList<>();
@ -225,13 +210,13 @@ public class InnerHitsIT extends ParentChildTestCase {
BoolQueryBuilder boolQuery = new BoolQueryBuilder();
boolQuery.should(constantScoreQuery(hasChildQuery("child1", matchAllQuery(), ScoreMode.None)
.innerHit(new InnerHitBuilder().setName("a")
.addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size))));
.addSort(new FieldSortBuilder("_id").order(SortOrder.ASC)).setSize(size))));
boolQuery.should(constantScoreQuery(hasChildQuery("child2", matchAllQuery(), ScoreMode.None)
.innerHit(new InnerHitBuilder().setName("b")
.addSort(new FieldSortBuilder("_uid").order(SortOrder.ASC)).setSize(size))));
.addSort(new FieldSortBuilder("_id").order(SortOrder.ASC)).setSize(size))));
SearchResponse searchResponse = client().prepareSearch("idx")
.setSize(numDocs)
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.setQuery(boolQuery)
.get();
@ -243,7 +228,7 @@ public class InnerHitsIT extends ParentChildTestCase {
int offset2 = 0;
for (int parent = 0; parent < numDocs; parent++) {
SearchHit searchHit = searchResponse.getHits().getAt(parent);
assertThat(searchHit.getType(), equalTo(legacy() ? "parent" : "doc"));
assertThat(searchHit.getType(), equalTo("doc"));
assertThat(searchHit.getId(), equalTo(String.format(Locale.ENGLISH, "p_%03d", parent)));
assertThat(searchHit.getShard(), notNullValue());
@ -251,7 +236,7 @@ public class InnerHitsIT extends ParentChildTestCase {
assertThat(inner.getTotalHits(), equalTo((long) child1InnerObjects[parent]));
for (int child = 0; child < child1InnerObjects[parent] && child < size; child++) {
SearchHit innerHit = inner.getAt(child);
assertThat(innerHit.getType(), equalTo(legacy() ? "child1" : "doc"));
assertThat(innerHit.getType(), equalTo("doc"));
String childId = String.format(Locale.ENGLISH, "c1_%04d", offset1 + child);
assertThat(innerHit.getId(), equalTo(childId));
assertThat(innerHit.getNestedIdentity(), nullValue());
@ -262,7 +247,7 @@ public class InnerHitsIT extends ParentChildTestCase {
assertThat(inner.getTotalHits(), equalTo((long) child2InnerObjects[parent]));
for (int child = 0; child < child2InnerObjects[parent] && child < size; child++) {
SearchHit innerHit = inner.getAt(child);
assertThat(innerHit.getType(), equalTo(legacy() ? "child2" : "doc"));
assertThat(innerHit.getType(), equalTo("doc"));
String childId = String.format(Locale.ENGLISH, "c2_%04d", offset2 + child);
assertThat(innerHit.getId(), equalTo(childId));
assertThat(innerHit.getNestedIdentity(), nullValue());
@ -272,16 +257,9 @@ public class InnerHitsIT extends ParentChildTestCase {
}
public void testInnerHitsOnHasParent() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("stack")
.addMapping("question", "body", "type=text")
.addMapping("answer", "_parent", "type=question", "body", "type=text")
);
} else {
assertAcked(prepareCreate("stack")
.addMapping("doc", addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "question", "answer"),
"body", "text")));
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("stack", "question", "1", null, "body", "I'm using HTTPS + Basic authentication "
+ "to protect a resource. How can I throttle authentication attempts to protect against brute force attacks?"));
@ -293,7 +271,7 @@ public class InnerHitsIT extends ParentChildTestCase {
indexRandom(true, requests);
SearchResponse response = client().prepareSearch("stack")
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.setQuery(
boolQuery()
.must(matchQuery("body", "fail2ban"))
@ -304,32 +282,24 @@ public class InnerHitsIT extends ParentChildTestCase {
SearchHit searchHit = response.getHits().getAt(0);
assertThat(searchHit.getId(), equalTo("3"));
assertThat(searchHit.getType(), equalTo(legacy() ? "answer" : "doc"));
assertThat(searchHit.getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo(legacy() ? "question" : "doc"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("1"));
searchHit = response.getHits().getAt(1);
assertThat(searchHit.getId(), equalTo("4"));
assertThat(searchHit.getType(), equalTo(legacy() ? "answer" : "doc"));
assertThat(searchHit.getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getTotalHits(), equalTo(1L));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo(legacy() ? "question" : "doc"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getType(), equalTo("doc"));
assertThat(searchHit.getInnerHits().get("question").getAt(0).getId(), equalTo("2"));
}
public void testParentChildMultipleLayers() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("articles")
.addMapping("article", "title", "type=text")
.addMapping("comment", "_parent", "type=article", "message", "type=text")
.addMapping("remark", "_parent", "type=comment", "message", "type=text")
);
} else {
assertAcked(prepareCreate("articles")
.addMapping("doc",
addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true,
"article", "comment", "comment", "remark"), "title", "text", "message", "text")));
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("articles", "article", "1", null, "title", "quick brown fox"));
@ -354,12 +324,12 @@ public class InnerHitsIT extends ParentChildTestCase {
SearchHits innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getTotalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("3"));
assertThat(innerHits.getAt(0).getType(), equalTo(legacy() ? "comment" : "doc"));
assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
assertThat(innerHits.getTotalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("5"));
assertThat(innerHits.getAt(0).getType(), equalTo(legacy() ? "remark" : "doc"));
assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
response = client().prepareSearch("articles")
.setQuery(hasChildQuery("comment",
@ -375,29 +345,18 @@ public class InnerHitsIT extends ParentChildTestCase {
innerHits = response.getHits().getAt(0).getInnerHits().get("comment");
assertThat(innerHits.getTotalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("4"));
assertThat(innerHits.getAt(0).getType(), equalTo(legacy() ? "comment" : "doc"));
assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
innerHits = innerHits.getAt(0).getInnerHits().get("remark");
assertThat(innerHits.getTotalHits(), equalTo(1L));
assertThat(innerHits.getAt(0).getId(), equalTo("6"));
assertThat(innerHits.getAt(0).getType(), equalTo(legacy() ? "remark" : "doc"));
assertThat(innerHits.getAt(0).getType(), equalTo("doc"));
}
public void testRoyals() throws Exception {
if (legacy()) {
assertAcked(
prepareCreate("royals")
.addMapping("king")
.addMapping("prince", "_parent", "type=king")
.addMapping("duke", "_parent", "type=prince")
.addMapping("earl", "_parent", "type=duke")
.addMapping("baron", "_parent", "type=earl")
);
} else {
assertAcked(prepareCreate("royals")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true,
"king", "prince", "prince", "duke", "duke", "earl", "earl", "baron")));
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("royals", "king", "king", null));
@ -423,7 +382,7 @@ public class InnerHitsIT extends ParentChildTestCase {
hasChildQuery("baron", matchAllQuery(), ScoreMode.None)
.innerHit(new InnerHitBuilder().setName("barons")),
ScoreMode.None).innerHit(new InnerHitBuilder()
.addSort(SortBuilders.fieldSort("_uid").order(SortOrder.ASC))
.addSort(SortBuilders.fieldSort("_id").order(SortOrder.ASC))
.setName("earls")
.setSize(4))
)
@ -464,13 +423,8 @@ public class InnerHitsIT extends ParentChildTestCase {
}
public void testMatchesQueriesParentChildInnerHits() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("index")
.addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("index")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
List<IndexRequestBuilder> requests = new ArrayList<>();
requests.add(createIndexRequest("index", "parent", "1", null));
requests.add(createIndexRequest("index", "child", "3", "1", "field", "value1"));
@ -482,7 +436,7 @@ public class InnerHitsIT extends ParentChildTestCase {
SearchResponse response = client().prepareSearch("index")
.setQuery(hasChildQuery("child", matchQuery("field", "value1").queryName("_name1"), ScoreMode.None)
.innerHit(new InnerHitBuilder()))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 2);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -499,7 +453,7 @@ public class InnerHitsIT extends ParentChildTestCase {
.innerHit(new InnerHitBuilder());
response = client().prepareSearch("index")
.setQuery(query)
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -509,12 +463,8 @@ public class InnerHitsIT extends ParentChildTestCase {
}
public void testUseMaxDocInsteadOfSize() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("index1").addMapping("child", "_parent", "type=parent"));
} else {
assertAcked(prepareCreate("index1")
.addMapping("doc", buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent", "child")));
}
client().admin().indices().prepareUpdateSettings("index1")
.setSettings(Collections.singletonMap(IndexSettings.MAX_INNER_RESULT_WINDOW_SETTING.getKey(), ArrayUtil.MAX_ARRAY_LENGTH))
.get();
@ -533,14 +483,9 @@ public class InnerHitsIT extends ParentChildTestCase {
}
public void testNestedInnerHitWrappedInParentChildInnerhit() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("test")
.addMapping("child_type", "_parent", "type=parent_type", "nested_type", "type=nested"));
} else {
assertAcked(prepareCreate("test")
.addMapping("doc", addFieldMappings(buildParentJoinFieldMappingFromSimplifiedDef("join_field", true,
"parent_type", "child_type"), "nested_type", "nested")));
}
createIndexRequest("test", "parent_type", "1", null, "key", "value").get();
createIndexRequest("test", "child_type", "2", "1", "nested_type", Collections.singletonMap("key", "value")).get();
refresh();
@ -551,28 +496,17 @@ public class InnerHitsIT extends ParentChildTestCase {
.get();
assertHitCount(response, 1);
SearchHit hit = response.getHits().getAt(0);
if (legacy()) {
assertThat(hit.getInnerHits().get("child_type").getAt(0).field("_parent").getValue(), equalTo("1"));
} else {
String parentId = (String) extractValue("join_field.parent", hit.getInnerHits().get("child_type").getAt(0).getSourceAsMap());
assertThat(parentId, equalTo("1"));
}
assertThat(hit.getInnerHits().get("child_type").getAt(0).getInnerHits().get("nested_type").getAt(0).field("_parent"), nullValue());
}
public void testInnerHitsWithIgnoreUnmapped() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("index1")
.addMapping("parent_type", "nested_type", "type=nested")
.addMapping("child_type", "_parent", "type=parent_type")
);
} else {
assertAcked(prepareCreate("index1")
.addMapping("doc", addFieldMappings(
buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"),
"nested_type", "nested"))
);
}
assertAcked(prepareCreate("index2"));
createIndexRequest("index1", "parent_type", "1", null, "nested_type", Collections.singletonMap("key", "value")).get();
createIndexRequest("index1", "child_type", "2", "1").get();
@ -592,18 +526,11 @@ public class InnerHitsIT extends ParentChildTestCase {
}
public void testTooHighResultWindow() throws Exception {
if (legacy()) {
assertAcked(prepareCreate("index1")
.addMapping("parent_type", "nested_type", "type=nested")
.addMapping("child_type", "_parent", "type=parent_type")
);
} else {
assertAcked(prepareCreate("index1")
.addMapping("doc", addFieldMappings(
buildParentJoinFieldMappingFromSimplifiedDef("join_field", true, "parent_type", "child_type"),
"nested_type", "nested"))
);
}
createIndexRequest("index1", "parent_type", "1", null, "nested_type", Collections.singletonMap("key", "value")).get();
createIndexRequest("index1", "child_type", "2", "1").get();
refresh();


@ -1,308 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingResponse;
import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder;
import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery;
import static org.elasticsearch.index.query.QueryBuilders.multiMatchQuery;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.notNullValue;
public class LegacyChildQuerySearchIT extends ChildQuerySearchIT {
@Override
protected boolean legacy() {
return true;
}
public void testIndexChildDocWithNoParentMapping() throws IOException {
assertAcked(prepareCreate("test")
.addMapping("parent")
.addMapping("child1"));
ensureGreen();
client().prepareIndex("test", "parent", "p1").setSource("p_field", "p_value1").get();
try {
client().prepareIndex("test", "child1", "c1").setParent("p1").setSource("c_field", "blue").get();
fail();
} catch (IllegalArgumentException e) {
assertThat(e.toString(), containsString("can't specify parent if no parent field has been configured"));
}
try {
client().prepareIndex("test", "child2", "c2").setParent("p1").setSource("c_field", "blue").get();
fail();
} catch (IllegalArgumentException e) {
assertThat(e.toString(), containsString("can't specify parent if no parent field has been configured"));
}
refresh();
}
public void testAddingParentToExistingMapping() throws IOException {
createIndex("test");
ensureGreen();
PutMappingResponse putMappingResponse = client().admin().indices()
.preparePutMapping("test").setType("child").setSource("number", "type=integer")
.get();
assertThat(putMappingResponse.isAcknowledged(), equalTo(true));
GetMappingsResponse getMappingsResponse = client().admin().indices().prepareGetMappings("test").get();
Map<String, Object> mapping = getMappingsResponse.getMappings().get("test").get("child").getSourceAsMap();
assertThat(mapping.size(), greaterThanOrEqualTo(1)); // there are potentially some meta fields configured randomly
assertThat(mapping.get("properties"), notNullValue());
try {
// Adding _parent metadata field to existing mapping is prohibited:
client().admin().indices().preparePutMapping("test").setType("child").setSource(jsonBuilder().startObject().startObject("child")
.startObject("_parent").field("type", "parent").endObject()
.endObject().endObject()).get();
fail();
} catch (IllegalArgumentException e) {
assertThat(e.toString(), containsString("The _parent field's type option can't be changed: [null]->[parent]"));
}
}
// Issue #5783
public void testQueryBeforeChildType() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("features")
.addMapping("posts", "_parent", "type=features")
.addMapping("specials"));
ensureGreen();
client().prepareIndex("test", "features", "1").setSource("field", "foo").get();
client().prepareIndex("test", "posts", "1").setParent("1").setSource("field", "bar").get();
refresh();
SearchResponse resp;
resp = client().prepareSearch("test")
.setSource(new SearchSourceBuilder().query(hasChildQuery("posts",
QueryBuilders.matchQuery("field", "bar"), ScoreMode.None)))
.get();
assertHitCount(resp, 1L);
}
// Issue #6256
public void testParentFieldInMultiMatchField() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("type1")
.addMapping("type2", "_parent", "type=type1")
);
ensureGreen();
client().prepareIndex("test", "type2", "1").setParent("1").setSource("field", "value").get();
refresh();
SearchResponse response = client().prepareSearch("test")
.setQuery(multiMatchQuery("1", "_parent#type1"))
.get();
assertThat(response.getHits().getTotalHits(), equalTo(1L));
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
}
public void testParentFieldToNonExistingType() {
assertAcked(prepareCreate("test")
.addMapping("parent").addMapping("child", "_parent", "type=parent2"));
client().prepareIndex("test", "parent", "1").setSource("{}", XContentType.JSON).get();
client().prepareIndex("test", "child", "1").setParent("1").setSource("{}", XContentType.JSON).get();
refresh();
try {
client().prepareSearch("test")
.setQuery(hasChildQuery("child", matchAllQuery(), ScoreMode.None))
.get();
fail();
} catch (SearchPhaseExecutionException e) {
}
}
/*
Test for https://github.com/elastic/elasticsearch/issues/3444
*/
public void testBulkUpdateDocAsUpsertWithParent() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("parent", "{\"parent\":{}}", XContentType.JSON)
.addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}", XContentType.JSON));
ensureGreen();
BulkRequestBuilder builder = client().prepareBulk();
// It's important to use JSON parsing here and request objects: issue 3444 is related to incomplete option parsing
byte[] addParent = (
"{" +
" \"index\" : {" +
" \"_index\" : \"test\"," +
" \"_type\" : \"parent\"," +
" \"_id\" : \"parent1\"" +
" }" +
"}" +
"\n" +
"{" +
" \"field1\" : \"value1\"" +
"}" +
"\n").getBytes(StandardCharsets.UTF_8);
byte[] addChild = (
"{" +
" \"update\" : {" +
" \"_index\" : \"test\"," +
" \"_type\" : \"child\"," +
" \"_id\" : \"child1\"," +
" \"parent\" : \"parent1\"" +
" }" +
"}" +
"\n" +
"{" +
" \"doc\" : {" +
" \"field1\" : \"value1\"" +
" }," +
" \"doc_as_upsert\" : \"true\"" +
"}" +
"\n").getBytes(StandardCharsets.UTF_8);
builder.add(addParent, 0, addParent.length, XContentType.JSON);
builder.add(addChild, 0, addChild.length, XContentType.JSON);
BulkResponse bulkResponse = builder.get();
assertThat(bulkResponse.getItems().length, equalTo(2));
assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
client().admin().indices().prepareRefresh("test").get();
//we check that the _parent field was set on the child document by using the has parent query
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", QueryBuilders.matchAllQuery(), false))
.get();
assertNoFailures(searchResponse);
assertSearchHits(searchResponse, "child1");
}
/*
Test for https://github.com/elastic/elasticsearch/issues/3444
*/
public void testBulkUpdateUpsertWithParent() throws Exception {
assertAcked(prepareCreate("test")
.addMapping("parent", "{\"parent\":{}}", XContentType.JSON)
.addMapping("child", "{\"child\": {\"_parent\": {\"type\": \"parent\"}}}", XContentType.JSON));
ensureGreen();
BulkRequestBuilder builder = client().prepareBulk();
byte[] addParent = (
"{" +
" \"index\" : {" +
" \"_index\" : \"test\"," +
" \"_type\" : \"parent\"," +
" \"_id\" : \"parent1\"" +
" }" +
"}" +
"\n" +
"{" +
" \"field1\" : \"value1\"" +
"}" +
"\n").getBytes(StandardCharsets.UTF_8);
byte[] addChild1 = (
"{" +
" \"update\" : {" +
" \"_index\" : \"test\"," +
" \"_type\" : \"child\"," +
" \"_id\" : \"child1\"," +
" \"parent\" : \"parent1\"" +
" }" +
"}" +
"\n" +
"{" +
" \"script\" : {" +
" \"inline\" : \"ctx._source.field2 = 'value2'\"" +
" }," +
" \"lang\" : \"" + InnerHitsIT.CustomScriptPlugin.NAME + "\"," +
" \"upsert\" : {" +
" \"field1\" : \"value1'\"" +
" }" +
"}" +
"\n").getBytes(StandardCharsets.UTF_8);
byte[] addChild2 = (
"{" +
" \"update\" : {" +
" \"_index\" : \"test\"," +
" \"_type\" : \"child\"," +
" \"_id\" : \"child1\"," +
" \"parent\" : \"parent1\"" +
" }" +
"}" +
"\n" +
"{" +
" \"script\" : \"ctx._source.field2 = 'value2'\"," +
" \"upsert\" : {" +
" \"field1\" : \"value1'\"" +
" }" +
"}" +
"\n").getBytes(StandardCharsets.UTF_8);
builder.add(addParent, 0, addParent.length, XContentType.JSON);
builder.add(addChild1, 0, addChild1.length, XContentType.JSON);
builder.add(addChild2, 0, addChild2.length, XContentType.JSON);
BulkResponse bulkResponse = builder.get();
assertThat(bulkResponse.getItems().length, equalTo(3));
assertThat(bulkResponse.getItems()[0].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[1].isFailed(), equalTo(false));
assertThat(bulkResponse.getItems()[2].isFailed(), equalTo(true));
assertThat(bulkResponse.getItems()[2].getFailure().getCause().getCause().getMessage(),
equalTo("script_lang not supported [painless]"));
client().admin().indices().prepareRefresh("test").get();
SearchResponse searchResponse = client().prepareSearch("test")
.setQuery(hasParentQuery("parent", QueryBuilders.matchAllQuery(), false))
.get();
assertSearchHits(searchResponse, "child1");
}
}


@ -1,353 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.query;
import com.carrotsearch.randomizedtesting.generators.RandomPicks;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermInSetQuery;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.UidFieldMapper;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
public class LegacyHasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQueryBuilder> {
protected static final String PARENT_TYPE = "parent";
protected static final String CHILD_TYPE = "child";
private static String similarity;
boolean requiresRewrite = false;
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(ParentJoinPlugin.class);
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
similarity = randomFrom("boolean", "BM25");
// TODO: use a single type when inner hits have been changed to work with join field,
// this test randomly generates queries with inner hits
mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
"custom_string", "type=text,similarity=" + similarity,
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
))), MapperService.MergeReason.MAPPING_UPDATE);
}
@Override
protected Settings indexSettings() {
return Settings.builder()
.put(super.indexSettings())
.put("index.version.created", Version.V_5_6_0) // multi type
.build();
}
/**
* @return a {@link HasChildQueryBuilder} with random values all over the place
*/
@Override
protected HasChildQueryBuilder doCreateTestQueryBuilder() {
int min = randomIntBetween(0, Integer.MAX_VALUE / 2);
int max = randomIntBetween(min, Integer.MAX_VALUE);
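// min is non-negative and max is always >= min, so the minMaxChildren call below never throws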
QueryBuilder innerQueryBuilder = new MatchAllQueryBuilder();
if (randomBoolean()) {
requiresRewrite = true;
innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
}
HasChildQueryBuilder hqb = new HasChildQueryBuilder(CHILD_TYPE, innerQueryBuilder,
RandomPicks.randomFrom(random(), ScoreMode.values()));
hqb.minMaxChildren(min, max);
hqb.ignoreUnmapped(randomBoolean());
if (randomBoolean()) {
hqb.innerHit(new InnerHitBuilder()
.setName(randomAlphaOfLengthBetween(1, 10))
.setSize(randomIntBetween(0, 100))
.addSort(new FieldSortBuilder(STRING_FIELD_NAME_2).order(SortOrder.ASC))
.setIgnoreUnmapped(hqb.ignoreUnmapped()));
}
return hqb;
}
@Override
protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
assertEquals(queryBuilder.minChildren(), lpq.getMinChildren());
assertEquals(queryBuilder.maxChildren(), lpq.getMaxChildren());
assertEquals(queryBuilder.scoreMode(), lpq.getScoreMode()); // score mode is exposed on both the builder and the parsed query; they must agree
if (queryBuilder.innerHit() != null) {
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
// doCreateTestQueryBuilder)
queryBuilder = (HasChildQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
for (InnerHitContextBuilder builder : innerHitBuilders.values()) {
builder.build(searchContext, searchContext.innerHits());
}
assertNotNull(searchContext.innerHits());
assertEquals(1, searchContext.innerHits().getInnerHits().size());
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
InnerHitsContext.InnerHitSubContext innerHits =
searchContext.innerHits().getInnerHits().get(queryBuilder.innerHit().getName());
assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
assertEquals(innerHits.sort().sort.getSort().length, 1);
assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2);
}
}
/**
* Test (de)serialization on all previous released versions
*/
public void testSerializationBWC() throws IOException {
for (Version version : VersionUtils.allReleasedVersions()) {
HasChildQueryBuilder testQuery = createTestQueryBuilder();
if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) {
// ignore_unmapped for inner_hits was only added in 5.2
testQuery.innerHit().setIgnoreUnmapped(false);
}
assertSerialization(testQuery, version);
}
}
public void testIllegalValues() {
QueryBuilder query = new MatchAllQueryBuilder();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> hasChildQuery(null, query, ScoreMode.None));
assertEquals("[has_child] requires 'type' field", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> hasChildQuery("foo", null, ScoreMode.None));
assertEquals("[has_child] requires 'query' field", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> hasChildQuery("foo", query, null));
assertEquals("[has_child] requires 'score_mode' field", e.getMessage());
int positiveValue = randomIntBetween(0, Integer.MAX_VALUE);
HasChildQueryBuilder foo = hasChildQuery("foo", query, ScoreMode.None); // all good
e = expectThrows(IllegalArgumentException.class, () -> foo.minMaxChildren(randomIntBetween(Integer.MIN_VALUE, -1), positiveValue));
assertEquals("[has_child] requires non-negative 'min_children' field", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> foo.minMaxChildren(positiveValue, randomIntBetween(Integer.MIN_VALUE, -1)));
assertEquals("[has_child] requires non-negative 'max_children' field", e.getMessage());
e = expectThrows(IllegalArgumentException.class, () -> foo.minMaxChildren(positiveValue, positiveValue - 10));
assertEquals("[has_child] 'max_children' is less than 'min_children'", e.getMessage());
}
public void testFromJson() throws IOException {
String query =
"{\n" +
" \"has_child\" : {\n" +
" \"query\" : {\n" +
" \"range\" : {\n" +
" \"mapped_string\" : {\n" +
" \"from\" : \"agJhRET\",\n" +
" \"to\" : \"zvqIq\",\n" +
" \"include_lower\" : true,\n" +
" \"include_upper\" : true,\n" +
" \"boost\" : 1.0\n" +
" }\n" +
" }\n" +
" },\n" +
" \"type\" : \"child\",\n" +
" \"score_mode\" : \"avg\",\n" +
" \"min_children\" : 883170873,\n" +
" \"max_children\" : 1217235442,\n" +
" \"ignore_unmapped\" : false,\n" +
" \"boost\" : 2.0,\n" +
" \"_name\" : \"WNzYMJKRwePuRBh\",\n" +
" \"inner_hits\" : {\n" +
" \"name\" : \"inner_hits_name\",\n" +
" \"ignore_unmapped\" : false,\n" +
" \"from\" : 0,\n" +
" \"size\" : 100,\n" +
" \"version\" : false,\n" +
" \"explain\" : false,\n" +
" \"track_scores\" : false,\n" +
" \"sort\" : [ {\n" +
" \"mapped_string\" : {\n" +
" \"order\" : \"asc\"\n" +
" }\n" +
" } ]\n" +
" }\n" +
" }\n" +
"}";
HasChildQueryBuilder queryBuilder = (HasChildQueryBuilder) parseQuery(query);
checkGeneratedJson(query, queryBuilder);
assertEquals(query, queryBuilder.maxChildren(), 1217235442);
assertEquals(query, queryBuilder.minChildren(), 883170873);
assertEquals(query, queryBuilder.boost(), 2.0f, 0.0f);
assertEquals(query, queryBuilder.queryName(), "WNzYMJKRwePuRBh");
assertEquals(query, queryBuilder.childType(), "child");
assertEquals(query, queryBuilder.scoreMode(), ScoreMode.Avg);
assertNotNull(query, queryBuilder.innerHit());
InnerHitBuilder expected = new InnerHitBuilder("child")
.setName("inner_hits_name")
.setSize(100)
.addSort(new FieldSortBuilder("mapped_string").order(SortOrder.ASC));
assertEquals(query, queryBuilder.innerHit(), expected);
}
public void testToQueryInnerQueryType() throws IOException {
String[] searchTypes = new String[]{PARENT_TYPE};
QueryShardContext shardContext = createShardContext();
shardContext.setTypes(searchTypes);
HasChildQueryBuilder hasChildQueryBuilder = hasChildQuery(CHILD_TYPE, new IdsQueryBuilder().addIds("id"), ScoreMode.None);
Query query = hasChildQueryBuilder.toQuery(shardContext);
//verify that the context types are still the same as the ones we previously set
assertThat(shardContext.getTypes(), equalTo(searchTypes));
assertLateParsingQuery(query, CHILD_TYPE, "id");
}
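/**
 * Asserts that {@code query} is a {@link HasChildQueryBuilder.LateParsingQuery} whose inner query
 * combines a MUST clause on the child's uid-based ids query with a FILTER clause on the child type.
 */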
static void assertLateParsingQuery(Query query, String type, String id) throws IOException {
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
HasChildQueryBuilder.LateParsingQuery lateParsingQuery = (HasChildQueryBuilder.LateParsingQuery) query;
assertThat(lateParsingQuery.getInnerQuery(), instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) lateParsingQuery.getInnerQuery();
assertThat(booleanQuery.clauses().size(), equalTo(2));
//check the inner ids query, we have to call rewrite to get to check the type it's executed against
assertThat(booleanQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.MUST));
assertThat(booleanQuery.clauses().get(0).getQuery(), instanceOf(TermInSetQuery.class));
TermInSetQuery termsQuery = (TermInSetQuery) booleanQuery.clauses().get(0).getQuery();
Query rewrittenTermsQuery = termsQuery.rewrite(null);
assertThat(rewrittenTermsQuery, instanceOf(ConstantScoreQuery.class));
ConstantScoreQuery constantScoreQuery = (ConstantScoreQuery) rewrittenTermsQuery;
assertThat(constantScoreQuery.getQuery(), instanceOf(BooleanQuery.class));
BooleanQuery booleanTermsQuery = (BooleanQuery) constantScoreQuery.getQuery();
assertThat(booleanTermsQuery.clauses().toString(), booleanTermsQuery.clauses().size(), equalTo(1));
assertThat(booleanTermsQuery.clauses().get(0).getOccur(), equalTo(BooleanClause.Occur.SHOULD));
assertThat(booleanTermsQuery.clauses().get(0).getQuery(), instanceOf(TermQuery.class));
TermQuery termQuery = (TermQuery) booleanTermsQuery.clauses().get(0).getQuery();
assertThat(termQuery.getTerm().field(), equalTo(UidFieldMapper.NAME));
// we want to make sure that the inner ids query gets executed against the child type rather
// than the main type we initially set on the context
BytesRef[] ids = Uid.createUidsForTypesAndIds(Collections.singletonList(type), Collections.singletonList(id));
assertThat(termQuery.getTerm().bytes(), equalTo(ids[0]));
//check the type filter
assertThat(booleanQuery.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.FILTER));
assertEquals(new TypeFieldMapper.TypesQuery(new BytesRef(type)), booleanQuery.clauses().get(1).getQuery());
}
@Override
public void testMustRewrite() throws IOException {
try {
super.testMustRewrite();
} catch (UnsupportedOperationException e) {
if (requiresRewrite == false) {
throw e;
}
}
}
public void testNonDefaultSimilarity() throws Exception {
QueryShardContext shardContext = createShardContext();
HasChildQueryBuilder hasChildQueryBuilder =
hasChildQuery(CHILD_TYPE, new TermQueryBuilder("custom_string", "value"), ScoreMode.None);
HasChildQueryBuilder.LateParsingQuery query = (HasChildQueryBuilder.LateParsingQuery) hasChildQueryBuilder.toQuery(shardContext);
Similarity expected = SimilarityService.BUILT_IN.get(similarity)
.apply(Settings.EMPTY, Version.CURRENT, null);
assertThat(((PerFieldSimilarityWrapper) query.getSimilarity()).get("custom_string"), instanceOf(expected.getClass()));
}
public void testIgnoreUnmapped() throws IOException {
final HasChildQueryBuilder queryBuilder = new HasChildQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None);
queryBuilder.ignoreUnmapped(true);
Query query = queryBuilder.toQuery(createShardContext());
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
final HasChildQueryBuilder failingQueryBuilder = new HasChildQueryBuilder("unmapped", new MatchAllQueryBuilder(), ScoreMode.None);
failingQueryBuilder.ignoreUnmapped(false);
QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext()));
assertThat(e.getMessage(), containsString("[" + HasChildQueryBuilder.NAME + "] no mapping found for type [unmapped]"));
}
public void testIgnoreUnmappedWithRewrite() throws IOException {
// WrapperQueryBuilder makes sure we always rewrite
final HasChildQueryBuilder queryBuilder
= new HasChildQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), ScoreMode.None);
queryBuilder.ignoreUnmapped(true);
QueryShardContext queryShardContext = createShardContext();
Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext);
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
}
}

View File

@ -1,258 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.query;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.query.IdsQueryBuilder;
import org.elasticsearch.index.query.InnerHitBuilder;
import org.elasticsearch.index.query.InnerHitContextBuilder;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.TermQueryBuilder;
import org.elasticsearch.index.query.WrapperQueryBuilder;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.fetch.subphase.InnerHitsContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.sort.FieldSortBuilder;
import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQueryBuilder> {
protected static final String PARENT_TYPE = "parent";
protected static final String CHILD_TYPE = "child";
boolean requiresRewrite = false;
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(ParentJoinPlugin.class);
}
@Override
protected Settings indexSettings() {
return Settings.builder()
.put(super.indexSettings())
.put("index.version.created", Version.V_5_6_0) // legacy needs multi types
.build();
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
// TODO: use a single type when inner hits have been changed to work with join field,
// this test randomly generates queries with inner hits
mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
))), MapperService.MergeReason.MAPPING_UPDATE);
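// A type with no _parent mapping and no children; testIllegalValues uses it to trigger the
// "[has_parent] no child types found" error.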
mapperService.merge("just_a_type", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
))), MapperService.MergeReason.MAPPING_UPDATE);
}
/**
* @return a {@link HasParentQueryBuilder} with random values all over the place
*/
@Override
protected HasParentQueryBuilder doCreateTestQueryBuilder() {
QueryBuilder innerQueryBuilder = new MatchAllQueryBuilder();
if (randomBoolean()) {
requiresRewrite = true;
innerQueryBuilder = new WrapperQueryBuilder(innerQueryBuilder.toString());
}
HasParentQueryBuilder hqb = new HasParentQueryBuilder(PARENT_TYPE, innerQueryBuilder, randomBoolean());
hqb.ignoreUnmapped(randomBoolean());
if (randomBoolean()) {
hqb.innerHit(new InnerHitBuilder()
.setName(randomAlphaOfLengthBetween(1, 10))
.setSize(randomIntBetween(0, 100))
.addSort(new FieldSortBuilder(STRING_FIELD_NAME_2).order(SortOrder.ASC))
.setIgnoreUnmapped(hqb.ignoreUnmapped()));
}
return hqb;
}
@Override
protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query query, SearchContext searchContext) throws IOException {
assertThat(query, instanceOf(HasChildQueryBuilder.LateParsingQuery.class));
HasChildQueryBuilder.LateParsingQuery lpq = (HasChildQueryBuilder.LateParsingQuery) query;
assertEquals(queryBuilder.score() ? ScoreMode.Max : ScoreMode.None, lpq.getScoreMode());
if (queryBuilder.innerHit() != null) {
// have to rewrite again because the provided queryBuilder hasn't been rewritten (directly returned from
// doCreateTestQueryBuilder)
queryBuilder = (HasParentQueryBuilder) queryBuilder.rewrite(searchContext.getQueryShardContext());
assertNotNull(searchContext);
Map<String, InnerHitContextBuilder> innerHitBuilders = new HashMap<>();
InnerHitContextBuilder.extractInnerHits(queryBuilder, innerHitBuilders);
for (InnerHitContextBuilder builder : innerHitBuilders.values()) {
builder.build(searchContext, searchContext.innerHits());
}
assertNotNull(searchContext.innerHits());
assertEquals(1, searchContext.innerHits().getInnerHits().size());
assertTrue(searchContext.innerHits().getInnerHits().containsKey(queryBuilder.innerHit().getName()));
InnerHitsContext.InnerHitSubContext innerHits = searchContext.innerHits()
.getInnerHits().get(queryBuilder.innerHit().getName());
assertEquals(innerHits.size(), queryBuilder.innerHit().getSize());
assertEquals(innerHits.sort().sort.getSort().length, 1);
assertEquals(innerHits.sort().sort.getSort()[0].getField(), STRING_FIELD_NAME_2);
}
}
/**
* Test (de)serialization on all previous released versions
*/
public void testSerializationBWC() throws IOException {
for (Version version : VersionUtils.allReleasedVersions()) {
HasParentQueryBuilder testQuery = createTestQueryBuilder();
if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) {
// ignore_unmapped for inner_hits was only added in 5.2
testQuery.innerHit().setIgnoreUnmapped(false);
}
assertSerialization(testQuery, version);
}
}
public void testIllegalValues() throws IOException {
QueryBuilder query = new MatchAllQueryBuilder();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> hasParentQuery(null, query, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'type' field"));
e = expectThrows(IllegalArgumentException.class,
() -> hasParentQuery("foo", null, false));
assertThat(e.getMessage(), equalTo("[has_parent] requires 'query' field"));
QueryShardContext context = createShardContext();
HasParentQueryBuilder qb = hasParentQuery("just_a_type", new MatchAllQueryBuilder(), false);
QueryShardException qse = expectThrows(QueryShardException.class, () -> qb.doToQuery(context));
assertThat(qse.getMessage(), equalTo("[has_parent] no child types found for type [just_a_type]"));
}
public void testToQueryInnerQueryType() throws IOException {
String[] searchTypes = new String[]{CHILD_TYPE};
QueryShardContext shardContext = createShardContext();
shardContext.setTypes(searchTypes);
HasParentQueryBuilder hasParentQueryBuilder = new HasParentQueryBuilder(PARENT_TYPE, new IdsQueryBuilder().addIds("id"),
false);
Query query = hasParentQueryBuilder.toQuery(shardContext);
//verify that the context types are still the same as the ones we previously set
assertThat(shardContext.getTypes(), equalTo(searchTypes));
LegacyHasChildQueryBuilderTests.assertLateParsingQuery(query, PARENT_TYPE, "id");
}
@Override
public void testMustRewrite() throws IOException {
try {
super.testMustRewrite();
} catch (UnsupportedOperationException e) {
if (requiresRewrite == false) {
throw e;
}
}
}
public void testFromJson() throws IOException {
String json =
"{\n" +
" \"has_parent\" : {\n" +
" \"query\" : {\n" +
" \"term\" : {\n" +
" \"tag\" : {\n" +
" \"value\" : \"something\",\n" +
" \"boost\" : 1.0\n" +
" }\n" +
" }\n" +
" },\n" +
" \"parent_type\" : \"blog\",\n" +
" \"score\" : true,\n" +
" \"ignore_unmapped\" : false,\n" +
" \"boost\" : 1.0\n" +
" }\n" +
"}";
HasParentQueryBuilder parsed = (HasParentQueryBuilder) parseQuery(json);
checkGeneratedJson(json, parsed);
assertEquals(json, "blog", parsed.type());
assertEquals(json, "something", ((TermQueryBuilder) parsed.query()).value());
}
public void testIgnoreUnmapped() throws IOException {
final HasParentQueryBuilder queryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
queryBuilder.ignoreUnmapped(true);
Query query = queryBuilder.toQuery(createShardContext());
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
final HasParentQueryBuilder failingQueryBuilder = new HasParentQueryBuilder("unmapped", new MatchAllQueryBuilder(), false);
failingQueryBuilder.ignoreUnmapped(false);
QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext()));
assertThat(e.getMessage(),
containsString("[" + HasParentQueryBuilder.NAME + "] query configured 'parent_type' [unmapped] is not a valid type"));
}
public void testIgnoreUnmappedWithRewrite() throws IOException {
// WrapperQueryBuilder makes sure we always rewrite
final HasParentQueryBuilder queryBuilder =
new HasParentQueryBuilder("unmapped", new WrapperQueryBuilder(new MatchAllQueryBuilder().toString()), false);
queryBuilder.ignoreUnmapped(true);
QueryShardContext queryShardContext = createShardContext();
Query query = queryBuilder.rewrite(queryShardContext).toQuery(queryShardContext);
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
}
}

View File

@ -1,28 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.query;
public class LegacyInnerHitsIT extends InnerHitsIT {
@Override
protected boolean legacy() {
return true;
}
}

View File

@ -1,121 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.join.query;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.AbstractQueryTestCase;
import org.hamcrest.Matchers;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.notNullValue;
public class LegacyParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQueryBuilder> {
protected static final String PARENT_TYPE = "parent";
protected static final String CHILD_TYPE = "child";
@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return Collections.singletonList(ParentJoinPlugin.class);
}
@Override
protected Settings indexSettings() {
return Settings.builder()
.put(super.indexSettings())
.put("index.version.created", Version.V_5_6_0) // legacy needs multi type
.build();
}
@Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text",
INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object"
))), MapperService.MergeReason.MAPPING_UPDATE);
}
@Override
protected ParentIdQueryBuilder doCreateTestQueryBuilder() {
return new ParentIdQueryBuilder(CHILD_TYPE, randomAlphaOfLength(4)).ignoreUnmapped(randomBoolean());
}
@Override
protected void doAssertLuceneQuery(ParentIdQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException {
assertThat(query, Matchers.instanceOf(BooleanQuery.class));
BooleanQuery booleanQuery = (BooleanQuery) query;
assertThat(booleanQuery.clauses().size(), Matchers.equalTo(2));
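// first clause matches children by parent id via doc values on the legacy "_parent#<parent type>"
// field, second clause filters on the child _type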
DocValuesTermsQuery idQuery = (DocValuesTermsQuery) booleanQuery.clauses().get(0).getQuery();
// there are no getters for the field and terms on DocValuesTermsQuery, so let's validate by creating a
// new query based on the builder:
assertThat(idQuery, Matchers.equalTo(new DocValuesTermsQuery("_parent#" + PARENT_TYPE, queryBuilder.getId())));
TermQuery typeQuery = (TermQuery) booleanQuery.clauses().get(1).getQuery();
assertThat(typeQuery.getTerm().field(), Matchers.equalTo(TypeFieldMapper.NAME));
assertThat(typeQuery.getTerm().text(), Matchers.equalTo(queryBuilder.getType()));
}
public void testIgnoreUnmapped() throws IOException {
final ParentIdQueryBuilder queryBuilder = new ParentIdQueryBuilder("unmapped", "foo");
queryBuilder.ignoreUnmapped(true);
Query query = queryBuilder.toQuery(createShardContext());
assertThat(query, notNullValue());
assertThat(query, instanceOf(MatchNoDocsQuery.class));
final ParentIdQueryBuilder failingQueryBuilder = new ParentIdQueryBuilder("unmapped", "foo");
failingQueryBuilder.ignoreUnmapped(false);
QueryShardException e = expectThrows(QueryShardException.class, () -> failingQueryBuilder.toQuery(createShardContext()));
assertThat(e.getMessage(), containsString("[" + ParentIdQueryBuilder.NAME + "] no mapping found for type [unmapped]"));
}
}

View File

@ -18,7 +18,6 @@
*/
package org.elasticsearch.join.query;
import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
@ -63,17 +62,9 @@ public abstract class ParentChildTestCase extends ESIntegTestCase {
.put(IndexModule.INDEX_QUERY_CACHE_ENABLED_SETTING.getKey(), true)
.put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true);
if (legacy()) {
builder.put("index.version.created", Version.V_5_6_0);
}
return builder.build();
}
protected boolean legacy() {
return false;
}
protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, Object... fields) {
Map<String, Object> source = new HashMap<>();
for (int i = 0; i < fields.length; i += 2) {
@ -123,17 +114,9 @@ public abstract class ParentChildTestCase extends ESIntegTestCase {
private IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, Map<String, Object> source) {
String name = type;
if (legacy() == false) {
type = "doc";
}
IndexRequestBuilder indexRequestBuilder = client().prepareIndex(index, type, id);
if (legacy()) {
if (parentId != null) {
indexRequestBuilder.setParent(parentId);
}
indexRequestBuilder.setSource(source);
} else {
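// Without the legacy _parent field, the parent/child relation is expressed through a join field:
// a map holding the relation name and, for child documents, the parent id.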
Map<String, Object> joinField = new HashMap<>();
if (parentId != null) {
joinField.put("name", name);
@ -144,7 +127,6 @@ public abstract class ParentChildTestCase extends ESIntegTestCase {
}
source.put("join_field", joinField);
indexRequestBuilder.setSource(source);
}
return indexRequestBuilder;
}

View File

@ -69,7 +69,6 @@ import org.elasticsearch.index.analysis.FieldNameAnalyzer;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.DocumentMapperForType;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
@ -582,7 +581,6 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
final List<ParsedDocument> docs = new ArrayList<>();
final DocumentMapper docMapper;
final MapperService mapperService = context.getMapperService();
if (context.getIndexSettings().isSingleType()) {
Collection<String> types = mapperService.types();
if (types.size() != 1) {
throw new IllegalStateException("Only a single type should exist, but [" + types.size() + " types exists");
@ -599,16 +597,6 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
for (BytesReference document : documents) {
docs.add(docMapper.parse(source(context.index().getName(), type, "_temp_id", document, documentXContentType)));
}
} else {
if (documentType == null) {
throw new IllegalArgumentException("[percolate] query is missing required [document_type] parameter");
}
DocumentMapperForType docMapperForType = mapperService.documentMapperWithAutoCreate(documentType);
docMapper = docMapperForType.getDocumentMapper();
for (BytesReference document : documents) {
docs.add(docMapper.parse(source(context.index().getName(), documentType, "_temp_id", document, documentXContentType)));
}
}
FieldNameAnalyzer fieldNameAnalyzer = (FieldNameAnalyzer) docMapper.mappers().indexAnalyzer();
// Need this custom impl because FieldNameAnalyzer is strict and the percolator sometimes isn't when

View File

@ -103,9 +103,6 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
STRING_FIELD_NAME, "type=text"
))), MapperService.MergeReason.MAPPING_UPDATE);
if (mapperService.getIndexSettings().isSingleType() == false) {
PercolateQueryBuilderTests.docType = docType;
}
}
@Override
@ -244,14 +241,8 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
public void testFromJsonNoDocumentType() throws IOException {
QueryShardContext queryShardContext = createShardContext();
QueryBuilder queryBuilder = parseQuery("{\"percolate\" : { \"document\": {}, \"field\":\"" + queryField + "\"}}");
if (indexVersionCreated.before(Version.V_6_0_0_alpha1)) {
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> queryBuilder.toQuery(queryShardContext));
assertThat(e.getMessage(), equalTo("[percolate] query is missing required [document_type] parameter"));
} else {
queryBuilder.toQuery(queryShardContext);
}
}
public void testBothDocumentAndDocumentsSpecified() throws IOException {
expectThrows(IllegalArgumentException.class,

View File

@ -97,7 +97,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
logger.info("percolating doc with 1 field");
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 2);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -109,7 +109,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
logger.info("percolating doc with 2 fields");
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 3);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -125,7 +125,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()),
BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject())
), XContentType.JSON))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 3);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -312,7 +312,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
logger.info("percolating doc with 1 field");
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 2);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -321,7 +321,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
logger.info("percolating doc with 2 fields");
response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", "test", "type", "6", null, null, null))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 3);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -408,7 +408,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.endObject());
SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(response, 4);
assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
@ -455,7 +455,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
SearchResponse searchResponse = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON))
.highlighter(new HighlightBuilder().field("field1"))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(searchResponse, 5);
@ -482,7 +482,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.should(new PercolateQueryBuilder("query", document2, XContentType.JSON).setName("query2"))
)
.highlighter(new HighlightBuilder().field("field1"))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
logger.info("searchResponse={}", searchResponse);
assertHitCount(searchResponse, 5);
@ -506,7 +506,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject())
), XContentType.JSON))
.highlighter(new HighlightBuilder().field("field1"))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
assertHitCount(searchResponse, 5);
assertThat(searchResponse.getHits().getAt(0).getFields().get("_percolator_document_slot").getValues(),
@ -546,7 +546,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
), XContentType.JSON).setName("query2"))
)
.highlighter(new HighlightBuilder().field("field1"))
.addSort("_uid", SortOrder.ASC)
.addSort("_id", SortOrder.ASC)
.get();
logger.info("searchResponse={}", searchResponse);
assertHitCount(searchResponse, 5);

View File

@ -45,7 +45,6 @@ import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
@ -181,7 +180,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
* Copies the metadata from a hit to the request.
*/
protected RequestWrapper<?> copyMetadata(RequestWrapper<?> request, ScrollableHitSource.Hit doc) {
request.setParent(doc.getParent());
copyRouting(request, doc.getRouting());
return request;
}
@ -550,10 +548,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
void setVersionType(VersionType versionType);
void setParent(String parent);
String getParent();
void setRouting(String routing);
String getRouting();
@ -621,16 +615,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
request.versionType(versionType);
}
@Override
public void setParent(String parent) {
request.parent(parent);
}
@Override
public String getParent() {
return request.parent();
}
@Override
public void setRouting(String routing) {
request.routing(routing);
@ -720,16 +704,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
request.versionType(versionType);
}
@Override
public void setParent(String parent) {
request.parent(parent);
}
@Override
public String getParent() {
return request.parent();
}
@Override
public void setRouting(String routing) {
request.routing(routing);
@ -807,8 +781,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
context.put(IdFieldMapper.NAME, doc.getId());
Long oldVersion = doc.getVersion();
context.put(VersionFieldMapper.NAME, oldVersion);
String oldParent = doc.getParent();
context.put(ParentFieldMapper.NAME, oldParent);
String oldRouting = doc.getRouting();
context.put(RoutingFieldMapper.NAME, oldRouting);
context.put(SourceFieldMapper.NAME, request.getSource());
@ -846,10 +818,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
if (false == Objects.equals(oldVersion, newValue)) {
scriptChangedVersion(request, newValue);
}
newValue = context.remove(ParentFieldMapper.NAME);
if (false == Objects.equals(oldParent, newValue)) {
scriptChangedParent(request, newValue);
}
/*
* It's important that routing comes after parent in case you want to
* change them both.
@ -879,7 +847,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
RequestWrapper<DeleteRequest> delete = wrap(new DeleteRequest(request.getIndex(), request.getType(), request.getId()));
delete.setVersion(request.getVersion());
delete.setVersionType(VersionType.INTERNAL);
delete.setParent(request.getParent());
delete.setRouting(request.getRouting());
return delete;
default:
@ -897,8 +864,6 @@ public abstract class AbstractAsyncBulkByScrollAction<Request extends AbstractBu
protected abstract void scriptChangedRouting(RequestWrapper<?> request, Object to);
protected abstract void scriptChangedParent(RequestWrapper<?> request, Object to);
}
public enum OpType {

View File

@ -69,7 +69,6 @@ public class AsyncDeleteByQueryAction extends AbstractAsyncBulkByScrollAction<De
*/
@Override
protected RequestWrapper<?> copyMetadata(RequestWrapper<?> request, ScrollableHitSource.Hit doc) {
request.setParent(doc.getParent());
request.setRouting(doc.getRouting());
return request;
}

View File

@ -359,7 +359,6 @@ public class TransportReindexAction extends HandledTransportAction<ReindexReques
* here on out operates on the index request rather than the template.
*/
index.routing(mainRequest.getDestination().routing());
index.parent(mainRequest.getDestination().parent());
index.setPipeline(mainRequest.getDestination().getPipeline());
// OpType is synthesized from version so it is handled when we copy version above.
@ -432,14 +431,6 @@ public class TransportReindexAction extends HandledTransportAction<ReindexReques
}
}
@Override
protected void scriptChangedParent(RequestWrapper<?> request, Object to) {
// Have to override routing with parent just in case its changed
String routing = Objects.toString(to, null);
request.setParent(routing);
request.setRouting(routing);
}
@Override
protected void scriptChangedRouting(RequestWrapper<?> request, Object to) {
request.setRouting(Objects.toString(to, null));

View File

@ -34,7 +34,6 @@ import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.IndexFieldMapper;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.script.Script;
@ -161,11 +160,6 @@ public class TransportUpdateByQueryAction extends HandledTransportAction<UpdateB
throw new IllegalArgumentException("Modifying [" + RoutingFieldMapper.NAME + "] not allowed");
}
@Override
protected void scriptChangedParent(RequestWrapper<?> request, Object to) {
throw new IllegalArgumentException("Modifying [" + ParentFieldMapper.NAME + "] not allowed");
}
}
}
}

View File

@ -86,23 +86,20 @@ final class RemoteResponseParsers {
}
}, new ParseField("_source"));
ParseField routingField = new ParseField("_routing");
ParseField parentField = new ParseField("_parent");
ParseField ttlField = new ParseField("_ttl");
ParseField parentField = new ParseField("_parent");
HIT_PARSER.declareString(BasicHit::setRouting, routingField);
HIT_PARSER.declareString(BasicHit::setParent, parentField);
// Pre-2.0.0 parent and routing come back in "fields"
// Pre-2.0.0 routing come back in "fields"
class Fields {
String routing;
String parent;
}
ObjectParser<Fields, XContentType> fieldsParser = new ObjectParser<>("fields", Fields::new);
HIT_PARSER.declareObject((hit, fields) -> {
hit.setRouting(fields.routing);
hit.setParent(fields.parent);
}, fieldsParser, new ParseField("fields"));
fieldsParser.declareString((fields, routing) -> fields.routing = routing, routingField);
fieldsParser.declareString((fields, parent) -> fields.parent = parent, parentField);
fieldsParser.declareLong((fields, ttl) -> {}, ttlField); // ignore ttls since they have been removed
fieldsParser.declareString((fields, parent) -> {}, parentField); // ignore parents since they have been removed
}
/**

View File

@ -307,25 +307,4 @@ public class DeleteByQueryBasicTests extends ReindexTestCase {
}
/**
* Test delete by query support for filtering by type. This entire feature
* can and should be removed when we drop support for indices with
* multiple types from core.
*/
public void testFilterByType() throws Exception {
assertAcked(client().admin().indices().prepareCreate("test")
.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))); // allows for multiple types
indexRandom(true,
client().prepareIndex("test", "test1", "1").setSource("foo", "a"),
client().prepareIndex("test", "test2", "2").setSource("foo", "a"),
client().prepareIndex("test", "test2", "3").setSource("foo", "b"));
assertHitCount(client().prepareSearch("test").setSize(0).get(), 3);
// Deletes doc of the type "type2" that also matches foo:a
DeleteByQueryRequestBuilder builder = deleteByQuery().source("test").filter(termQuery("foo", "a")).refresh(true);
builder.source().setTypes("test2");
assertThat(builder.get(), matcher().deleted(1));
assertHitCount(client().prepareSearch("test").setSize(0).get(), 2);
}
}

View File

@ -1,204 +0,0 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.reindex;
import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.test.InternalSettingsPlugin;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import static org.elasticsearch.index.query.QueryBuilders.idsQuery;
import static org.elasticsearch.index.query.QueryBuilders.typeQuery;
import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
/**
* Reindex tests for legacy parent/child. Tests for the new {@code join}
* field are in a qa project.
*/
public class ReindexParentChildTests extends ReindexTestCase {
QueryBuilder findsCountry;
QueryBuilder findsCity;
QueryBuilder findsNeighborhood;
@Override
protected boolean ignoreExternalCluster() {
return true;
}
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
final List<Class<? extends Plugin>> plugins = new ArrayList<>(super.nodePlugins());
plugins.add(ParentJoinPlugin.class);
plugins.add(InternalSettingsPlugin.class);
plugins.add(CustomScriptPlugin.class);
return Collections.unmodifiableList(plugins);
}
@Override
protected Collection<Class<? extends Plugin>> transportClientPlugins() {
return nodePlugins();
}
public void testParentChild() throws Exception {
createParentChildIndex("source");
createParentChildIndex("dest");
createParentChildDocs("source", true);
// Copy parent to the new index
ReindexRequestBuilder copy = reindex().source("source").destination("dest").filter(findsCountry).refresh(true);
assertThat(copy.get(), matcher().created(1));
// Copy the child to a new index
copy = reindex().source("source").destination("dest").filter(findsCity).refresh(true);
assertThat(copy.get(), matcher().created(1));
// Make sure parent/child is intact on that index
assertSearchHits(client().prepareSearch("dest").setQuery(findsCity).get(), "pittsburgh");
// Copy the grandchild to a new index
copy = reindex().source("source").destination("dest").filter(findsNeighborhood).refresh(true);
assertThat(copy.get(), matcher().created(1));
// Make sure parent/child is intact on that index
assertSearchHits(client().prepareSearch("dest").setQuery(findsNeighborhood).get(),
"make-believe");
// Copy the parent/child/grandchild structure all at once to a third index
createParentChildIndex("dest_all_at_once");
copy = reindex().source("source").destination("dest_all_at_once").refresh(true);
assertThat(copy.get(), matcher().created(3));
// Make sure parent/child/grandchild is intact there too
assertSearchHits(client().prepareSearch("dest_all_at_once").setQuery(findsNeighborhood).get(),
"make-believe");
}
/**
* Tests for adding the {@code _parent} via script and adding *both* {@code _parent} and {@code _routing} values via scripts.
*/
public void testScriptAddsParent() throws Exception {
assertAcked(client().admin().indices().prepareCreate("source")
.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id))); // allows for multiple types
createParentChildIndex("dest");
createParentChildDocs("source", false);
ReindexRequestBuilder copy = reindex().source("source").destination("dest").filter(typeQuery("country")).refresh(true);
assertThat(copy.get(), matcher().created(1));
copy = reindex().source("source").destination("dest").filter(typeQuery("city"))
.script(mockScript("ctx._parent='united states'")).refresh(true);
assertThat(copy.get(), matcher().created(1));
assertSearchHits(client().prepareSearch("dest").setQuery(findsCity).get(), "pittsburgh");
copy = reindex().source("source").destination("dest").filter(typeQuery("neighborhood"))
.script(mockScript("ctx._parent='pittsburgh';ctx._routing='united states'")).refresh(true);
assertThat(copy.get(), matcher().created(1));
assertSearchHits(client().prepareSearch("dest").setQuery(findsNeighborhood).get(), "make-believe");
}
public void testErrorMessageWhenBadParentChild() throws Exception {
createParentChildIndex("source");
createParentChildDocs("source", true);
ReindexRequestBuilder copy = reindex().source("source").destination("dest").filter(findsCity);
final BulkByScrollResponse response = copy.get();
assertThat(response.getBulkFailures().size(), equalTo(1));
final Exception cause = response.getBulkFailures().get(0).getCause();
assertThat(cause, instanceOf(IllegalArgumentException.class));
assertThat(cause, hasToString(containsString("can't specify parent if no parent field has been configured")));
}
/**
* Set up a parent/child index whose city and neighborhood types declare legacy
* {@code _parent} mappings.
*/
private void createParentChildIndex(String indexName) throws Exception {
CreateIndexRequestBuilder create = client().admin().indices().prepareCreate(indexName);
create.setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)); // allows for multiple types
create.addMapping("city", "{\"_parent\": {\"type\": \"country\"}}", XContentType.JSON);
create.addMapping("neighborhood", "{\"_parent\": {\"type\": \"city\"}}", XContentType.JSON);
assertAcked(create);
ensureGreen();
}
private void createParentChildDocs(String indexName, boolean addParents) throws Exception {
indexRandom(true,
client().prepareIndex(indexName, "country", "united states")
.setSource("foo", "bar"),
client().prepareIndex(indexName, "city", "pittsburgh")
.setParent(addParents ? "united states" : null)
.setSource("foo", "bar"),
client().prepareIndex(indexName, "neighborhood", "make-believe")
.setParent(addParents ? "pittsburgh" : null)
.setRouting(addParents ? "united states" : null)
.setSource("foo", "bar"));
findsCountry = idsQuery("country").addIds("united states");
findsCity = hasParentQuery("country", findsCountry, false);
findsNeighborhood = hasParentQuery("city", findsCity, false);
if (addParents) {
// Make sure we built the parent/child relationship
assertSearchHits(client().prepareSearch(indexName).setQuery(findsCity).get(), "pittsburgh");
assertSearchHits(client().prepareSearch(indexName).setQuery(findsNeighborhood).get(), "make-believe");
}
}
public static class CustomScriptPlugin extends MockScriptPlugin {
@Override
@SuppressWarnings("unchecked")
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
Map<String, Function<Map<String, Object>, Object>> scripts = new HashMap<>();
scripts.put("ctx._parent='united states'", vars -> {
Map<String, String> ctx = (Map<String, String>) vars.get("ctx");
ctx.put("_parent", "united states");
return null;
});
scripts.put("ctx._parent='pittsburgh';ctx._routing='united states'", vars -> {
Map<String, String> ctx = (Map<String, String>) vars.get("ctx");
ctx.put("_parent", "pittsburgh");
ctx.put("_routing", "united states");
return null;
});
return scripts;
}
}
}

View File

@ -92,12 +92,6 @@ public class ReindexScriptTests extends AbstractAsyncBulkByScrollActionScriptTes
}
}
public void testSetParent() throws Exception {
String parent = randomRealisticUnicodeOfLengthBetween(5, 20);
IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_parent", parent));
assertEquals(parent, index.parent());
}
public void testSetRouting() throws Exception {
String routing = randomRealisticUnicodeOfLengthBetween(5, 20);
IndexRequest index = applyScript((Map<String, Object> ctx) -> ctx.put("_routing", routing));

View File

@ -39,7 +39,7 @@ public class UpdateByQueryWithScriptTests
* error message to the user, not some ClassCastException.
*/
Object[] options = new Object[] {"cat", new Object(), 123, new Date(), Math.PI};
for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_parent", "_routing"}) {
for (String ctxVar: new String[] {"_index", "_type", "_id", "_version", "_routing"}) {
try {
applyScript((Map<String, Object> ctx) -> ctx.put(ctxVar, randomFrom(options)));
} catch (IllegalArgumentException e) {

View File

@ -207,7 +207,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase {
assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
assertEquals("testrouting", r.getHits().get(0).getRouting());
assertEquals("testparent", r.getHits().get(0).getParent());
called.set(true);
});
assertTrue(called.get());
@ -222,7 +221,6 @@ public class RemoteScrollableHitSourceTests extends ESTestCase {
assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
assertEquals("testrouting", r.getHits().get(0).getRouting());
assertEquals("testparent", r.getHits().get(0).getParent());
called.set(true);
});
assertTrue(called.get());

View File

@ -87,7 +87,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1]))
.sort("collate")
.sort("_uid", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
.sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
SearchResponse response = client().search(request).actionGet();
@ -128,7 +128,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.query(QueryBuilders.termQuery("collate", "a"))
// if mode max we use c and b as sort values, if max we use "a" for both
.sort(SortBuilders.fieldSort("collate").sortMode(SortMode.MAX).order(SortOrder.DESC))
.sort("_uid", SortOrder.DESC) // will be ignored
.sort("_id", SortOrder.DESC) // will be ignored
);
SearchResponse response = client().search(request).actionGet();
@ -145,7 +145,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.query(QueryBuilders.termQuery("collate", "a"))
// if mode max we use c and b as sort values, if max we use "a" for both
.sort(SortBuilders.fieldSort("collate").sortMode(SortMode.MIN).order(SortOrder.DESC))
.sort("_uid", SortOrder.DESC) // will NOT be ignored and will determine order
.sort("_id", SortOrder.DESC) // will NOT be ignored and will determine order
);
response = client().search(request).actionGet();
@ -188,7 +188,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1]))
.sort("collate")
.sort("_uid", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
.sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
SearchResponse response = client().search(request).actionGet();
@ -230,7 +230,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1]))
.sort("collate")
.sort("_uid", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
.sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
SearchResponse response = client().search(request).actionGet();
@ -273,7 +273,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1]))
.sort("collate")
.sort("_uid", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
.sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
SearchResponse response = client().search(request).actionGet();
@ -316,7 +316,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.source(new SearchSourceBuilder()
.fetchSource(false)
.sort("collate", SortOrder.ASC)
.sort("_uid", SortOrder.ASC) // secondary sort should kick in on docs 1 and 3 because same value collate value
.sort("_id", SortOrder.ASC) // secondary sort should kick in on docs 1 and 3 because same value collate value
);
SearchResponse response = client().search(request).actionGet();
@ -398,7 +398,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.source(new SearchSourceBuilder()
.fetchSource(false)
.sort("collate", SortOrder.ASC)
.sort("_uid", SortOrder.DESC)
.sort("_id", SortOrder.DESC)
);
SearchResponse response = client().search(request).actionGet();
@ -492,7 +492,7 @@ public class ICUCollationKeywordFieldMapperIT extends ESIntegTestCase {
.fetchSource(false)
.query(QueryBuilders.termQuery("collate", randomBoolean() ? equilavent[0] : equilavent[1]))
.sort("collate", SortOrder.ASC)
.sort("_uid", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
.sort("_id", SortOrder.DESC) // secondary sort should kick in because both will collate to same value
);
SearchResponse response = client().search(request).actionGet();
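For reference, the tiebreaker pattern these tests switch to (a primary sort on the collation key, then a secondary sort on _id in place of the removed _uid) can be sketched roughly as follows; the class and method names here are illustrative and not part of this change.

import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
import org.elasticsearch.search.sort.SortMode;
import org.elasticsearch.search.sort.SortOrder;

public class IdTiebreakerSortSketch {
    // Builds a search that sorts primarily on the collation key and breaks ties on _id,
    // which takes over from _uid as the secondary sort in these tests.
    static SearchRequest collatedSearch(String index) {
        SearchSourceBuilder source = new SearchSourceBuilder()
                .fetchSource(false)
                .sort(SortBuilders.fieldSort("collate").sortMode(SortMode.MAX).order(SortOrder.DESC))
                .sort("_id", SortOrder.DESC);
        return new SearchRequest(index).source(source);
    }
}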

View File

@ -25,6 +25,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.index.mapper.EnabledAttributeMapper;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.Mapper;
@ -101,7 +102,7 @@ public class SizeFieldMapper extends MetadataFieldMapper {
String fieldName = entry.getKey();
Object fieldNode = entry.getValue();
if (fieldName.equals("enabled")) {
boolean enabled = TypeParsers.nodeBooleanValue(name, "enabled", fieldNode, parserContext);
boolean enabled = XContentMapValues.nodeBooleanValue(fieldNode, name + ".enabled");
builder.enabled(enabled ? EnabledAttributeMapper.ENABLED : EnabledAttributeMapper.DISABLED);
iterator.remove();
}
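The hunk above swaps TypeParsers.nodeBooleanValue for XContentMapValues.nodeBooleanValue when parsing the "enabled" setting. A small self-contained sketch of that parsing step; the surrounding class and helper are hypothetical:

import java.util.Map;
import org.elasticsearch.common.xcontent.support.XContentMapValues;

public class EnabledFlagParsingSketch {
    // Reads the "enabled" entry of a mapping node as a boolean; the second argument
    // is only used to build the error message if the value is not a valid boolean.
    static boolean isEnabled(String mapperName, Map<String, Object> node) {
        Object fieldNode = node.get("enabled");
        return XContentMapValues.nodeBooleanValue(fieldNode, mapperName + ".enabled");
    }
}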

View File

@ -33,7 +33,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase {
public void testAccessMetaDataViaTemplate() {
Map<String, Object> document = new HashMap<>();
document.put("foo", "bar");
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{foo}}", scriptService));
assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 bar"));
@ -48,7 +48,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase {
innerObject.put("baz", "hello baz");
innerObject.put("qux", Collections.singletonMap("fubar", "hello qux and fubar"));
document.put("foo", innerObject);
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("field1"),
ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", scriptService));
assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 hello bar hello baz hello qux and fubar"));
@ -67,7 +67,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase {
list.add(value);
list.add(null);
document.put("list2", list);
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{list1.0}} {{list2.0}}", scriptService));
assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 foo {field=value}"));
}
@ -77,7 +77,7 @@ public class IngestDocumentMustacheIT extends AbstractScriptTestCase {
Map<String, Object> ingestMap = new HashMap<>();
ingestMap.put("timestamp", "bogus_timestamp");
document.put("_ingest", ingestMap);
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, null, document);
IngestDocument ingestDocument = new IngestDocument("index", "type", "id", null, null, null, document);
ingestDocument.setFieldValue(compile("ingest_timestamp"),
ValueSource.wrap("{{_ingest.timestamp}} and {{_source._ingest.timestamp}}", scriptService));
assertThat(ingestDocument.getFieldValue("ingest_timestamp", String.class),

View File

@ -64,7 +64,7 @@ public class ValueSourceMustacheIT extends AbstractScriptTestCase {
}
public void testAccessSourceViaTemplate() {
IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, null, null, new HashMap<>());
IngestDocument ingestDocument = new IngestDocument("marvel", "type", "id", null, null, null, new HashMap<>());
assertThat(ingestDocument.hasField("marvel"), is(false));
ingestDocument.setFieldValue(compile("{{_index}}"), ValueSource.wrap("{{_index}}", scriptService));
assertThat(ingestDocument.getFieldValue("marvel", String.class), equalTo("marvel"));
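The mustache tests above now use the seven-argument IngestDocument constructor, which no longer takes a parent value. A minimal sketch of that construction, mirroring the nulls the tests pass for routing, version and version type:

import java.util.HashMap;
import java.util.Map;
import org.elasticsearch.ingest.IngestDocument;

public class IngestDocumentSketch {
    // Arguments: index, type, id, routing, version, versionType, source; no parent argument anymore.
    static IngestDocument sampleDocument() {
        Map<String, Object> source = new HashMap<>();
        source.put("foo", "bar");
        return new IngestDocument("index", "type", "id", null, null, null, source);
    }
}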

View File

@ -1,39 +0,0 @@
---
"Parent":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
- do:
catch: /routing_missing_exception/
create:
index: test_1
type: test
id: 1
body: { foo: bar }
- do:
create:
index: test_1
type: test
id: 1
parent: 5
body: { foo: bar }
- do:
get:
index: test_1
type: test
id: 1
parent: 5
stored_fields: [_parent, _routing]
- match: { _id: "1"}
- match: { _parent: "5"}
- match: { _routing: "5"}

View File

@ -1,55 +0,0 @@
---
"Parent with routing":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
create:
index: test_1
type: test
id: 1
parent: 5
routing: 4
body: { foo: bar }
- do:
get:
index: test_1
type: test
id: 1
parent: 5
routing: 4
stored_fields: [_parent, _routing]
- match: { _id: "1"}
- match: { _parent: "5"}
- match: { _routing: "4"}
- do:
catch: missing
get:
index: test_1
type: test
id: 1
parent: 5
- do:
get:
index: test_1
type: test
id: 1
routing: 4

View File

@ -1,36 +0,0 @@
---
"Parent":
- do:
indices.create:
index: test_1
body:
settings:
number_of_shards: 5
number_of_routing_shards: 5
mappings:
test:
_parent: { type: "foo" }
- do:
index:
index: test_1
type: test
id: 1
parent: 5
body: { foo: bar }
- do:
catch: missing
delete:
index: test_1
type: test
id: 1
parent: 1
- do:
delete:
index: test_1
type: test
id: 1
parent: 5

View File

@ -1,29 +0,0 @@
---
"Delete on all shards when parent not specified":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
- do:
cluster.health:
wait_for_status: yellow
- do:
index:
index: test_1
type: test
id: 1
parent: 5
body: { foo: bar }
- do:
catch: /routing_missing_exception/
delete:
index: test_1
type: test
id: 1

View File

@ -1,44 +0,0 @@
---
"Parent with routing":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
index:
index: test_1
type: test
id: 1
parent: 5
routing: 4
body: { foo: bar }
- do:
catch: missing
delete:
index: test_1
type: test
id: 1
parent: 5
routing: 1
- do:
delete:
index: test_1
type: test
id: 1
parent: 5
routing: 4

View File

@ -1,39 +0,0 @@
---
setup:
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
---
"Parent":
- do:
index:
index: test_1
type: test
id: 1
parent: 5
body: { foo: bar }
- do:
exists:
index: test_1
type: test
id: 1
parent: 5
- is_true: ''
---
"Parent omitted":
- do:
catch: bad_request
exists:
index: test_1
type: test
id: 1

View File

@ -1,57 +0,0 @@
---
"Parent with routing":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
index:
index: test_1
type: test
id: 1
parent: 5
routing: 4
body: { foo: bar }
- is_true: ''
- do:
exists:
index: test_1
type: test
id: 1
parent: 5
routing: 4
- is_true: ''
- do:
exists:
index: test_1
type: test
id: 1
parent: 5
- is_false: ''
- do:
exists:
index: test_1
type: test
id: 1
routing: 4
- is_true: ''

View File

@ -1,42 +0,0 @@
---
setup:
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
- do:
index:
index: test_1
type: test
id: 1
parent: 中文
body: { foo: bar }
---
"Parent":
- do:
get:
index: test_1
type: test
id: 1
parent: 中文
stored_fields: [_parent, _routing]
- match: { _id: "1"}
- match: { _parent: 中文 }
- match: { _routing: 中文}
---
"Parent omitted":
- do:
catch: bad_request
get:
index: test_1
type: test
id: 1

View File

@ -1,55 +0,0 @@
---
"Parent with routing":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
index:
index: test_1
type: test
id: 1
parent: 5
routing: 4
body: { foo: bar }
- do:
get:
index: test_1
type: test
id: 1
parent: 5
routing: 4
stored_fields: [_parent, _routing]
- match: { _id: "1"}
- match: { _parent: "5"}
- match: { _routing: "4"}
- do:
catch: missing
get:
index: test_1
type: test
id: 1
parent: 5
- do:
get:
index: test_1
type: test
id: 1
routing: 4

View File

@ -1,40 +0,0 @@
---
setup:
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
- do:
index:
index: test_1
type: test
id: 1
parent: 5
body: { foo: bar }
---
"Parent":
- do:
get_source:
index: test_1
type: test
id: 1
parent: 5
- match: { '': {foo: bar}}
---
"Parent omitted":
- do:
catch: bad_request
get_source:
index: test_1
type: test
id: 1

View File

@ -1,52 +0,0 @@
---
"Parent with routing":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
index:
index: test_1
type: test
id: 1
parent: 5
routing: 4
body: { foo: bar }
- do:
get_source:
index: test_1
type: test
id: 1
parent: 5
routing: 4
- match: { '': {foo: bar}}
- do:
catch: missing
get_source:
index: test_1
type: test
id: 1
parent: 5
- do:
get_source:
index: test_1
type: test
id: 1
routing: 4

View File

@ -1,39 +0,0 @@
---
"Parent":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
- do:
catch: /routing_missing_exception/
index:
index: test_1
type: test
id: 1
body: { foo: bar }
- do:
index:
index: test_1
type: test
id: 1
parent: 5
body: { foo: bar }
- do:
get:
index: test_1
type: test
id: 1
parent: 5
stored_fields: [_parent, _routing]
- match: { _id: "1"}
- match: { _parent: "5"}
- match: { _routing: "5"}

View File

@ -1,55 +0,0 @@
---
"Parent with routing":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
index:
index: test_1
type: test
id: 1
parent: 5
routing: 4
body: { foo: bar }
- do:
get:
index: test_1
type: test
id: 1
parent: 5
routing: 4
stored_fields: [_parent, _routing]
- match: { _id: "1"}
- match: { _parent: "5"}
- match: { _routing: "4"}
- do:
catch: missing
get:
index: test_1
type: test
id: 1
parent: 5
- do:
get:
index: test_1
type: test
id: 1
routing: 4

View File

@ -1,56 +0,0 @@
---
"Parent":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
- do:
index:
index: test_1
type: test
id: 1
parent: 4
body: { foo: bar }
- do:
index:
index: test_1
type: test
id: 2
parent: 5
body: { foo: bar }
- do:
mget:
index: test_1
type: test
body:
docs:
- { _id: 1 }
- { _id: 1, parent: 5, stored_fields: [ _parent, _routing ] }
- { _id: 1, parent: 4, stored_fields: [ _parent, _routing ] }
- { _id: 2, parent: 5, stored_fields: [ _parent, _routing ] }
- is_false: docs.0.found
- is_false: docs.1.found
- is_true: docs.2.found
- match: { docs.2._index: test_1 }
- match: { docs.2._type: test }
- match: { docs.2._id: "1" }
- match: { docs.2._parent: "4" }
- match: { docs.2._routing: "4" }
- is_true: docs.3.found
- match: { docs.3._index: test_1 }
- match: { docs.3._type: test }
- match: { docs.3._id: "2" }
- match: { docs.3._parent: "5" }
- match: { docs.3._routing: "5" }

View File

@ -1,47 +0,0 @@
---
"Parent":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
index:
index: test_1
type: test
id: 1
parent: 4
routing: 5
body: { foo: bar }
- do:
mget:
index: test_1
type: test
stored_fields: [ _routing , _parent]
body:
docs:
- { _id: 1 }
- { _id: 1, parent: 4 }
- { _id: 1, parent: 4, routing: 5 }
- is_false: docs.0.found
- is_false: docs.1.found
- is_true: docs.2.found
- match: { docs.2._index: test_1 }
- match: { docs.2._type: test }
- match: { docs.2._id: "1" }
- match: { docs.2._parent: "4" }
- match: { docs.2._routing: "5" }

View File

@ -581,21 +581,6 @@ setup:
- match: {hits.total: 4}
---
"Test exists query on _uid field":
- skip:
version: " - 6.0.99"
reason: exists on _uid not supported prior to 6.1.0
- do:
search:
index: test
body:
query:
exists:
field: _uid
- match: {hits.total: 4}
---
"Test exists query on _index field":
- skip:

View File

@ -1,75 +0,0 @@
---
setup:
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
---
"Parent":
- do:
catch: /routing_missing_exception/
update:
index: test_1
type: test
id: 1
body:
doc: { foo: baz }
upsert: { foo: bar }
- do:
update:
index: test_1
type: test
id: 1
parent: 5
body:
doc: { foo: baz }
upsert: { foo: bar }
- do:
get:
index: test_1
type: test
id: 1
parent: 5
stored_fields: [_parent, _routing]
- match: { _parent: "5"}
- match: { _routing: "5"}
- do:
update:
index: test_1
type: test
id: 1
parent: 5
_source: foo
body:
doc: { foo: baz }
- match: { get._source.foo: baz }
---
"Parent omitted":
- do:
index:
index: test_1
type: test
id: 1
parent: 5
body: { foo: bar }
- do:
catch: bad_request
update:
index: test_1
type: test
id: 1
body:
doc: { foo: baz }

View File

@ -1,64 +0,0 @@
---
"Parent with routing":
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
settings:
number_of_shards: 5
number_of_replicas: 0
- do:
cluster.health:
wait_for_status: green
- do:
update:
index: test_1
type: test
id: 1
parent: 5
routing: 4
body:
doc: { foo: baz }
upsert: { foo: bar }
- do:
get:
index: test_1
type: test
id: 1
routing: 4
parent: 5
stored_fields: [_parent, _routing]
- match: { _parent: "5"}
- match: { _routing: "4"}
- do:
catch: missing
update:
index: test_1
type: test
id: 1
parent: 5
body:
doc: { foo: baz }
- do:
update:
index: test_1
type: test
id: 1
parent: 5
routing: 4
_source: foo
body:
doc: { foo: baz }
- match: { get._source.foo: baz }

View File

@ -8,10 +8,6 @@
- do:
indices.create:
index: test_1
body:
mappings:
test:
_parent: { type: "foo" }
- do:
update:
@ -19,12 +15,11 @@
type: test
id: 1
parent: 5
fields: [ _parent, _routing ]
fields: [ _routing ]
body:
doc: { foo: baz }
upsert: { foo: bar }
- match: { get._parent: "5" }
- match: { get._routing: "5" }
- do:
@ -33,6 +28,6 @@
type: test
id: 1
parent: 5
stored_fields: [ _parent, _routing ]
stored_fields: [ _routing ]

View File

@ -71,13 +71,6 @@ public interface DocWriteRequest<T> extends IndicesRequest {
*/
String routing();
/**
* Get the parent for this request
* @return the Parent
*/
String parent();
/**
* Get the document version for this request
* @return the document version

View File

@ -75,7 +75,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField PARENT = new ParseField("parent");
private static final ParseField OP_TYPE = new ParseField("op_type");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
@ -333,7 +332,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
String type = defaultType;
String id = null;
String routing = defaultRouting;
String parent = null;
FetchSourceContext fetchSourceContext = defaultFetchSourceContext;
String[] fields = defaultFields;
String opType = null;
@ -363,8 +361,6 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
id = parser.text();
} else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
routing = parser.text();
} else if (PARENT.match(currentFieldName, parser.getDeprecationHandler())) {
parent = parser.text();
} else if (OP_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {
opType = parser.text();
} else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
@ -402,7 +398,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
}
if ("delete".equals(action)) {
add(new DeleteRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType), payload);
add(new DeleteRequest(index, type, id).routing(routing).version(version).versionType(versionType), payload);
} else {
nextMarker = findNextMarker(marker, from, data, length);
if (nextMarker == -1) {
@ -414,23 +410,22 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques
// of index request.
if ("index".equals(action)) {
if (opType == null) {
internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType)
internalAdd(new IndexRequest(index, type, id).routing(routing).version(version).versionType(versionType)
.setPipeline(pipeline)
.source(sliceTrimmingCarriageReturn(data, from, nextMarker,xContentType), xContentType), payload);
} else {
internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType)
internalAdd(new IndexRequest(index, type, id).routing(routing).version(version).versionType(versionType)
.create("create".equals(opType)).setPipeline(pipeline)
.source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType), payload);
}
} else if ("create".equals(action)) {
internalAdd(new IndexRequest(index, type, id).routing(routing).parent(parent).version(version).versionType(versionType)
internalAdd(new IndexRequest(index, type, id).routing(routing).version(version).versionType(versionType)
.create(true).setPipeline(pipeline)
.source(sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType), xContentType), payload);
} else if ("update".equals(action)) {
UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).parent(parent).retryOnConflict(retryOnConflict)
UpdateRequest updateRequest = new UpdateRequest(index, type, id).routing(routing).retryOnConflict(retryOnConflict)
.version(version).versionType(versionType)
.routing(routing)
.parent(parent);
.routing(routing);
// EMPTY is safe here because we never call namedObject
try (InputStream dataStream = sliceTrimmingCarriageReturn(data, from, nextMarker, xContentType).streamInput();
XContentParser sliceParser = xContent.createParser(NamedXContentRegistry.EMPTY,

View File

@ -299,7 +299,7 @@ public class TransportBulkAction extends HandledTransportAction<BulkRequest, Bul
TransportUpdateAction.resolveAndValidateRouting(metaData, concreteIndex.getName(), (UpdateRequest) docWriteRequest);
break;
case DELETE:
docWriteRequest.routing(metaData.resolveIndexRouting(docWriteRequest.parent(), docWriteRequest.routing(), docWriteRequest.index()));
docWriteRequest.routing(metaData.resolveIndexRouting(docWriteRequest.routing(), docWriteRequest.index()));
// check if routing is required, if so, throw error if routing wasn't specified
if (docWriteRequest.routing() == null && metaData.routingRequired(concreteIndex.getName(), docWriteRequest.type())) {
throw new RoutingMissingException(concreteIndex.getName(), docWriteRequest.type(), docWriteRequest.id());

View File

@ -513,7 +513,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
final SourceToParse sourceToParse =
SourceToParse.source(shardId.getIndexName(),
indexRequest.type(), indexRequest.id(), indexRequest.source(), indexRequest.getContentType())
.routing(indexRequest.routing()).parent(indexRequest.parent());
.routing(indexRequest.routing());
return replica.applyIndexOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(),
indexRequest.versionType().versionTypeForReplicationAndRecovery(), indexRequest.getAutoGeneratedTimestamp(),
indexRequest.isRetry(), sourceToParse, update -> {
@ -539,7 +539,7 @@ public class TransportShardBulkAction extends TransportWriteAction<BulkShardRequ
MappingUpdatePerformer mappingUpdater) throws Exception {
final SourceToParse sourceToParse =
SourceToParse.source(request.index(), request.type(), request.id(), request.source(), request.getContentType())
.routing(request.routing()).parent(request.parent());
.routing(request.routing());
try {
// if a mapping update is required to index this request, issue a mapping update on the master, and abort the
// current indexing operation so that it can be retried with the updated mapping from the master

View File

@ -19,6 +19,7 @@
package org.elasticsearch.action.delete;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
import org.elasticsearch.action.DocWriteRequest;
@ -51,8 +52,6 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest> impleme
private String id;
@Nullable
private String routing;
@Nullable
private String parent;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
@ -130,22 +129,6 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest> impleme
return this;
}
/**
* @return The parent for this request.
*/
@Override
public String parent() {
return parent;
}
/**
* Sets the parent id of this document.
*/
public DeleteRequest parent(String parent) {
this.parent = parent;
return this;
}
/**
* Controls the shard routing of the request. Using this value to hash the shard
* and not the id.
@ -202,7 +185,9 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest> impleme
type = in.readString();
id = in.readString();
routing = in.readOptionalString();
parent = in.readOptionalString();
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
in.readOptionalString(); // _parent
}
version = in.readLong();
versionType = VersionType.fromValue(in.readByte());
}
@ -213,7 +198,9 @@ public class DeleteRequest extends ReplicatedWriteRequest<DeleteRequest> impleme
out.writeString(type);
out.writeString(id);
out.writeOptionalString(routing());
out.writeOptionalString(parent());
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalString(null); // _parent
}
out.writeLong(version);
out.writeByte(versionType.getValue());
}
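The serialization hunks above keep the wire format compatible with pre-7.0 nodes by still reading, and writing as null, the optional string where _parent used to live. The general shape of that pattern, sketched with a hypothetical request class:

import java.io.IOException;
import org.elasticsearch.Version;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

public class RemovedFieldWireCompatSketch {
    private String routing;

    // Older nodes still expect the removed field on the wire, so a placeholder
    // is read and discarded when receiving from them.
    void readFrom(StreamInput in) throws IOException {
        routing = in.readOptionalString();
        if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
            in.readOptionalString(); // _parent, ignored
        }
    }

    // And a null placeholder is written when sending to them.
    void writeTo(StreamOutput out) throws IOException {
        out.writeOptionalString(routing);
        if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
            out.writeOptionalString(null); // _parent, always null now
        }
    }
}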

View File

@ -55,15 +55,6 @@ public class DeleteRequestBuilder extends ReplicationRequestBuilder<DeleteReques
return this;
}
/**
* Sets the parent id of this document. Will simply set the routing to this value, as it is only
* used for routing with delete requests.
*/
public DeleteRequestBuilder setParent(String parent) {
request.parent(parent);
return this;
}
/**
* Controls the shard routing of the delete request. Using this value to hash the shard
* and not the id.

View File

@ -19,6 +19,7 @@
package org.elasticsearch.action.get;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.RealtimeRequest;
import org.elasticsearch.action.ValidateActions;
@ -48,7 +49,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
private String type;
private String id;
private String routing;
private String parent;
private String preference;
private String[] storedFields;
@ -126,21 +126,6 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
return this;
}
/**
* @return The parent for this request.
*/
public String parent() {
return parent;
}
/**
* Sets the parent id of this document.
*/
public GetRequest parent(String parent) {
this.parent = parent;
return this;
}
/**
* Controls the shard routing of the request. Using this value to hash the shard
* and not the id.
@ -260,7 +245,9 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
type = in.readString();
id = in.readString();
routing = in.readOptionalString();
parent = in.readOptionalString();
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
in.readOptionalString();
}
preference = in.readOptionalString();
refresh = in.readBoolean();
storedFields = in.readOptionalStringArray();
@ -277,7 +264,9 @@ public class GetRequest extends SingleShardRequest<GetRequest> implements Realti
out.writeString(type);
out.writeString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalString(null);
}
out.writeOptionalString(preference);
out.writeBoolean(refresh);

View File

@ -56,15 +56,6 @@ public class GetRequestBuilder extends SingleShardOperationRequestBuilder<GetReq
return this;
}
/**
* Sets the parent id of this document. Will simply set the routing to this value, as it is only
* used for routing with delete requests.
*/
public GetRequestBuilder setParent(String parent) {
request.parent(parent);
return this;
}
/**
* Controls the shard routing of the request. Using this value to hash the shard
* and not the id.

View File

@ -20,6 +20,7 @@
package org.elasticsearch.action.get;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.CompositeIndicesRequest;
@ -58,7 +59,6 @@ public class MultiGetRequest extends ActionRequest
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField PARENT = new ParseField("parent");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
private static final ParseField FIELDS = new ParseField("fields");
@ -74,7 +74,6 @@ public class MultiGetRequest extends ActionRequest
private String type;
private String id;
private String routing;
private String parent;
private String[] storedFields;
private long version = Versions.MATCH_ANY;
private VersionType versionType = VersionType.INTERNAL;
@ -141,18 +140,6 @@ public class MultiGetRequest extends ActionRequest
return this.routing;
}
public Item parent(String parent) {
this.parent = parent;
return this;
}
/**
* @return The parent for this request.
*/
public String parent() {
return parent;
}
public Item storedFields(String... fields) {
this.storedFields = fields;
return this;
@ -204,7 +191,9 @@ public class MultiGetRequest extends ActionRequest
type = in.readOptionalString();
id = in.readString();
routing = in.readOptionalString();
parent = in.readOptionalString();
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
in.readOptionalString(); // _parent
}
storedFields = in.readOptionalStringArray();
version = in.readLong();
versionType = VersionType.fromValue(in.readByte());
@ -218,7 +207,9 @@ public class MultiGetRequest extends ActionRequest
out.writeOptionalString(type);
out.writeString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalString(null); // _parent
}
out.writeOptionalStringArray(storedFields);
out.writeLong(version);
out.writeByte(versionType.getValue());
@ -233,7 +224,6 @@ public class MultiGetRequest extends ActionRequest
builder.field(TYPE.getPreferredName(), type);
builder.field(ID.getPreferredName(), id);
builder.field(ROUTING.getPreferredName(), routing);
builder.field(PARENT.getPreferredName(), parent);
builder.field(STORED_FIELDS.getPreferredName(), storedFields);
builder.field(VERSION.getPreferredName(), version);
builder.field(VERSION_TYPE.getPreferredName(), VersionType.toString(versionType));
@ -256,7 +246,6 @@ public class MultiGetRequest extends ActionRequest
if (!id.equals(item.id)) return false;
if (!index.equals(item.index)) return false;
if (routing != null ? !routing.equals(item.routing) : item.routing != null) return false;
if (parent != null ? !parent.equals(item.parent) : item.parent != null) return false;
if (type != null ? !type.equals(item.type) : item.type != null) return false;
if (versionType != item.versionType) return false;
@ -269,7 +258,6 @@ public class MultiGetRequest extends ActionRequest
result = 31 * result + (type != null ? type.hashCode() : 0);
result = 31 * result + id.hashCode();
result = 31 * result + (routing != null ? routing.hashCode() : 0);
result = 31 * result + (parent != null ? parent.hashCode() : 0);
result = 31 * result + (storedFields != null ? Arrays.hashCode(storedFields) : 0);
result = 31 * result + Long.hashCode(version);
result = 31 * result + versionType.hashCode();
@ -407,7 +395,6 @@ public class MultiGetRequest extends ActionRequest
String type = defaultType;
String id = null;
String routing = defaultRouting;
String parent = null;
List<String> storedFields = null;
long version = Versions.MATCH_ANY;
VersionType versionType = VersionType.INTERNAL;
@ -429,8 +416,6 @@ public class MultiGetRequest extends ActionRequest
id = parser.text();
} else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
routing = parser.text();
} else if (PARENT.match(currentFieldName, parser.getDeprecationHandler())) {
parent = parser.text();
} else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
throw new ParsingException(parser.getTokenLocation(),
"Unsupported field [fields] used, expected [stored_fields] instead");
@ -510,7 +495,7 @@ public class MultiGetRequest extends ActionRequest
} else {
aFields = defaultFields;
}
items.add(new Item(index, type, id).routing(routing).storedFields(aFields).parent(parent).version(version).versionType(versionType)
items.add(new Item(index, type, id).routing(routing).storedFields(aFields).version(version).versionType(versionType)
.fetchSourceContext(fetchSourceContext == FetchSourceContext.FETCH_SOURCE ? defaultFetchSource : fetchSourceContext));
}
}
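With the per-item parent gone, a multi-get item carries routing only. A small illustrative sketch of building such a request; the index, type, id and routing values are placeholders:

import org.elasticsearch.action.get.MultiGetRequest;

public class MultiGetRoutingSketch {
    static MultiGetRequest sampleRequest() {
        MultiGetRequest request = new MultiGetRequest();
        // Each item resolves its shard from the explicit routing value alone.
        request.add(new MultiGetRequest.Item("test_1", "test", "1").routing("4"));
        request.add(new MultiGetRequest.Item("test_1", "test", "2").routing("5"));
        return request;
    }
}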

View File

@ -69,7 +69,7 @@ public class TransportGetAction extends TransportSingleShardAction<GetRequest, G
@Override
protected void resolveRequest(ClusterState state, InternalRequest request) {
// update the routing (request#index here is possibly an alias)
request.request().routing(state.metaData().resolveIndexRouting(request.request().parent(), request.request().routing(), request.request().index()));
request.request().routing(state.metaData().resolveIndexRouting(request.request().routing(), request.request().index()));
// Fail fast on the node that received the request.
if (request.request().routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.request().type())) {
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());

View File

@ -68,7 +68,7 @@ public class TransportMultiGetAction extends HandledTransportAction<MultiGetRequ
try {
concreteSingleIndex = indexNameExpressionResolver.concreteSingleIndex(clusterState, item).getName();
item.routing(clusterState.metaData().resolveIndexRouting(item.parent(), item.routing(), item.index()));
item.routing(clusterState.metaData().resolveIndexRouting(item.routing(), item.index()));
if ((item.routing() == null) && (clusterState.getMetaData().routingRequired(concreteSingleIndex, item.type()))) {
String message = "routing is required for [" + concreteSingleIndex + "]/[" + item.type() + "]/[" + item.id() + "]";
responses.set(i, newItemFailure(concreteSingleIndex, item.type(), item.id(), new IllegalArgumentException(message)));

View File

@ -84,8 +84,6 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
private String id;
@Nullable
private String routing;
@Nullable
private String parent;
private BytesReference source;
@ -254,19 +252,6 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
return this.routing;
}
/**
* Sets the parent id of this document.
*/
public IndexRequest parent(String parent) {
this.parent = parent;
return this;
}
@Override
public String parent() {
return this.parent;
}
/**
* Sets the ingest pipeline to be executed before indexing the document
*/
@ -490,14 +475,6 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
if (mappingMd.routing().required() && routing == null) {
throw new RoutingMissingException(concreteIndex, type, id);
}
if (parent != null && !mappingMd.hasParentField()) {
throw new IllegalArgumentException("can't specify parent if no parent field has been configured");
}
} else {
if (parent != null) {
throw new IllegalArgumentException("can't specify parent if no parent field has been configured");
}
}
if ("".equals(id)) {
@ -520,7 +497,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
/* resolve the routing if needed */
public void resolveRouting(MetaData metaData) {
routing(metaData.resolveIndexRouting(parent, routing, index));
routing(metaData.resolveIndexRouting(routing, index));
}
@Override
@ -529,7 +506,9 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
type = in.readOptionalString();
id = in.readOptionalString();
routing = in.readOptionalString();
parent = in.readOptionalString();
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
in.readOptionalString(); // _parent
}
if (in.getVersion().before(Version.V_6_0_0_alpha1)) {
in.readOptionalString(); // timestamp
in.readOptionalWriteable(TimeValue::new); // ttl
@ -554,7 +533,9 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
out.writeOptionalString(type);
out.writeOptionalString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalString(null); // _parent
}
if (out.getVersion().before(Version.V_6_0_0_alpha1)) {
// Serialize a fake timestamp. 5.x expect this value to be set by the #process method so we can't use null.
// On the other hand, indices created on 5.x do not index the timestamp field. Therefore passing a 0 (or any value) for

View File

@ -71,15 +71,6 @@ public class IndexRequestBuilder extends ReplicationRequestBuilder<IndexRequest,
return this;
}
/**
* Sets the parent id of this document. If routing is not set, automatically set it as the
* routing as well.
*/
public IndexRequestBuilder setParent(String parent) {
request.parent(parent);
return this;
}
/**
* Sets the source.
*/

View File

@ -193,8 +193,6 @@ public class SimulatePipelineRequest extends ActionRequest {
dataMap, MetaData.ID.getFieldName(), "_id");
String routing = ConfigurationUtils.readOptionalStringOrIntProperty(null, null,
dataMap, MetaData.ROUTING.getFieldName());
String parent = ConfigurationUtils.readOptionalStringOrIntProperty(null, null,
dataMap, MetaData.PARENT.getFieldName());
Long version = null;
if (dataMap.containsKey(MetaData.VERSION.getFieldName())) {
version = (Long) ConfigurationUtils.readObject(null, null, dataMap, MetaData.VERSION.getFieldName());
@ -205,7 +203,7 @@ public class SimulatePipelineRequest extends ActionRequest {
MetaData.VERSION_TYPE.getFieldName()));
}
IngestDocument ingestDocument =
new IngestDocument(index, type, id, routing, parent, version, versionType, document);
new IngestDocument(index, type, id, routing, version, versionType, document);
ingestDocumentList.add(ingestDocument);
}
return ingestDocumentList;

View File

@ -65,7 +65,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
private static final ParseField TYPE = new ParseField("_type");
private static final ParseField ID = new ParseField("_id");
private static final ParseField ROUTING = new ParseField("routing");
private static final ParseField PARENT = new ParseField("parent");
private static final ParseField VERSION = new ParseField("version");
private static final ParseField VERSION_TYPE = new ParseField("version_type");
private static final ParseField FIELDS = new ParseField("fields");
@ -87,8 +86,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
private String routing;
private String parent;
private VersionType versionType = VersionType.INTERNAL;
private long version = Versions.MATCH_ANY;
@ -185,7 +182,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
this.flagsEnum = other.getFlags().clone();
this.preference = other.preference();
this.routing = other.routing();
this.parent = other.parent();
if (other.selectedFields != null) {
this.selectedFields = new HashSet<>(other.selectedFields);
}
@ -204,7 +200,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
this.type = item.type();
this.selectedFields(item.storedFields());
this.routing(item.routing());
this.parent(item.parent());
}
public EnumSet<Flag> getFlags() {
@ -293,18 +288,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
return this;
}
public String parent() {
return parent;
}
/**
* Sets the parent id of this document.
*/
public TermVectorsRequest parent(String parent) {
this.parent = parent;
return this;
}
public String preference() {
return this.preference;
}
@ -522,7 +505,10 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
}
}
routing = in.readOptionalString();
parent = in.readOptionalString();
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
in.readOptionalString(); // _parent
}
preference = in.readOptionalString();
long flags = in.readVLong();
@ -565,7 +551,9 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
}
}
out.writeOptionalString(routing);
out.writeOptionalString(parent);
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalString(null); // _parent
}
out.writeOptionalString(preference);
long longFlags = 0;
for (Flag flag : flagsEnum) {
@ -650,8 +638,6 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
termVectorsRequest.doc(jsonBuilder().copyCurrentStructure(parser));
} else if (ROUTING.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.routing = parser.text();
} else if (PARENT.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.parent = parser.text();
} else if (VERSION.match(currentFieldName, parser.getDeprecationHandler())) {
termVectorsRequest.version = parser.longValue();
} else if (VERSION_TYPE.match(currentFieldName, parser.getDeprecationHandler())) {

View File

@ -88,15 +88,6 @@ public class TermVectorsRequestBuilder extends ActionRequestBuilder<TermVectorsR
return this;
}
/**
* Sets the parent id of this document. Will simply set the routing to this value, as it is only
* used for routing with delete requests.
*/
public TermVectorsRequestBuilder setParent(String parent) {
request.parent(parent);
return this;
}
/**
* Sets the preference to execute the search. Defaults to randomize across shards. Can be set to
* <tt>_local</tt> to prefer local shards or a custom value, which guarantees that the same order

View File

@ -64,7 +64,7 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction<Mult
Map<ShardId, MultiTermVectorsShardRequest> shardRequests = new HashMap<>();
for (int i = 0; i < request.requests.size(); i++) {
TermVectorsRequest termVectorsRequest = request.requests.get(i);
termVectorsRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorsRequest.parent(), termVectorsRequest.routing(), termVectorsRequest.index()));
termVectorsRequest.routing(clusterState.metaData().resolveIndexRouting(termVectorsRequest.routing(), termVectorsRequest.index()));
if (!clusterState.metaData().hasConcreteIndex(termVectorsRequest.index())) {
responses.set(i, new MultiTermVectorsItemResponse(null, new MultiTermVectorsResponse.Failure(termVectorsRequest.index(),
termVectorsRequest.type(), termVectorsRequest.id(), new IndexNotFoundException(termVectorsRequest.index()))));

View File

@ -78,7 +78,7 @@ public class TransportTermVectorsAction extends TransportSingleShardAction<TermV
@Override
protected void resolveRequest(ClusterState state, InternalRequest request) {
// update the routing (request#index here is possibly an alias or a parent)
request.request().routing(state.metaData().resolveIndexRouting(request.request().parent(), request.request().routing(), request.request().index()));
request.request().routing(state.metaData().resolveIndexRouting(request.request().routing(), request.request().index()));
// Fail fast on the node that received the request.
if (request.request().routing() == null && state.getMetaData().routingRequired(request.concreteIndex(), request.request().type())) {
throw new RoutingMissingException(request.concreteIndex(), request.request().type(), request.request().id());

View File

@ -105,7 +105,7 @@ public class TransportUpdateAction extends TransportInstanceSingleOperationActio
}
public static void resolveAndValidateRouting(MetaData metaData, String concreteIndex, UpdateRequest request) {
request.routing((metaData.resolveIndexRouting(request.parent(), request.routing(), request.index())));
request.routing((metaData.resolveIndexRouting(request.routing(), request.index())));
// Fail fast on the node that received the request, rather than failing when translating on the index or delete request.
if (request.routing() == null && metaData.routingRequired(concreteIndex, request.type())) {
throw new RoutingMissingException(concreteIndex, request.type(), request.id());

View File

@ -40,7 +40,6 @@ import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.index.engine.DocumentSourceMissingException;
import org.elasticsearch.index.get.GetResult;
import org.elasticsearch.index.mapper.ParentFieldMapper;
import org.elasticsearch.index.mapper.RoutingFieldMapper;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.ShardId;
@ -152,7 +151,7 @@ public class UpdateHelper extends AbstractComponent {
indexRequest.index(request.index())
.type(request.type()).id(request.id()).setRefreshPolicy(request.getRefreshPolicy()).routing(request.routing())
.parent(request.parent()).timeout(request.timeout()).waitForActiveShards(request.waitForActiveShards())
.timeout(request.timeout()).waitForActiveShards(request.waitForActiveShards())
// it has to be a "create!"
.create(true);
@ -191,20 +190,6 @@ public class UpdateHelper extends AbstractComponent {
}
}
/**
* Calculate a parent value to be used, either the included index request's parent, or retrieved document's parent when defined.
*/
@Nullable
static String calculateParent(GetResult getResult, @Nullable IndexRequest updateIndexRequest) {
if (updateIndexRequest != null && updateIndexRequest.parent() != null) {
return updateIndexRequest.parent();
} else if (getResult.getFields().containsKey(ParentFieldMapper.NAME)) {
return getResult.field(ParentFieldMapper.NAME).getValue().toString();
} else {
return null;
}
}
/**
* Prepare the request for merging the existing document with a new one, can optionally detect a noop change. Returns a {@code Result}
* containing a new {@code IndexRequest} to be executed on the primary and replicas.
@ -213,7 +198,6 @@ public class UpdateHelper extends AbstractComponent {
final long updateVersion = calculateUpdateVersion(request, getResult);
final IndexRequest currentRequest = request.doc();
final String routing = calculateRouting(getResult, currentRequest);
final String parent = calculateParent(getResult, currentRequest);
final Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true);
final XContentType updateSourceContentType = sourceAndContent.v1();
final Map<String, Object> updatedSourceAsMap = sourceAndContent.v2();
@ -230,7 +214,7 @@ public class UpdateHelper extends AbstractComponent {
return new Result(update, DocWriteResponse.Result.NOOP, updatedSourceAsMap, updateSourceContentType);
} else {
final IndexRequest finalIndexRequest = Requests.indexRequest(request.index())
.type(request.type()).id(request.id()).routing(routing).parent(parent)
.type(request.type()).id(request.id()).routing(routing)
.source(updatedSourceAsMap, updateSourceContentType).version(updateVersion).versionType(request.versionType())
.waitForActiveShards(request.waitForActiveShards()).timeout(request.timeout())
.setRefreshPolicy(request.getRefreshPolicy());
@ -247,7 +231,6 @@ public class UpdateHelper extends AbstractComponent {
final long updateVersion = calculateUpdateVersion(request, getResult);
final IndexRequest currentRequest = request.doc();
final String routing = calculateRouting(getResult, currentRequest);
final String parent = calculateParent(getResult, currentRequest);
final Tuple<XContentType, Map<String, Object>> sourceAndContent = XContentHelper.convertToMap(getResult.internalSourceRef(), true);
final XContentType updateSourceContentType = sourceAndContent.v1();
final Map<String, Object> sourceAsMap = sourceAndContent.v2();
@ -259,7 +242,6 @@ public class UpdateHelper extends AbstractComponent {
ctx.put(ContextFields.ID, getResult.getId());
ctx.put(ContextFields.VERSION, getResult.getVersion());
ctx.put(ContextFields.ROUTING, routing);
ctx.put(ContextFields.PARENT, parent);
ctx.put(ContextFields.SOURCE, sourceAsMap);
ctx.put(ContextFields.NOW, nowInMillis.getAsLong());
@ -272,14 +254,14 @@ public class UpdateHelper extends AbstractComponent {
switch (operation) {
case INDEX:
final IndexRequest indexRequest = Requests.indexRequest(request.index())
.type(request.type()).id(request.id()).routing(routing).parent(parent)
.type(request.type()).id(request.id()).routing(routing)
.source(updatedSourceAsMap, updateSourceContentType).version(updateVersion).versionType(request.versionType())
.waitForActiveShards(request.waitForActiveShards()).timeout(request.timeout())
.setRefreshPolicy(request.getRefreshPolicy());
return new Result(indexRequest, DocWriteResponse.Result.UPDATED, updatedSourceAsMap, updateSourceContentType);
case DELETE:
DeleteRequest deleteRequest = Requests.deleteRequest(request.index())
.type(request.type()).id(request.id()).routing(routing).parent(parent)
.type(request.type()).id(request.id()).routing(routing)
.version(updateVersion).versionType(request.versionType()).waitForActiveShards(request.waitForActiveShards())
.timeout(request.timeout()).setRefreshPolicy(request.getRefreshPolicy());
return new Result(deleteRequest, DocWriteResponse.Result.DELETED, updatedSourceAsMap, updateSourceContentType);
@ -452,6 +434,5 @@ public class UpdateHelper extends AbstractComponent {
public static final String ID = "_id";
public static final String VERSION = "_version";
public static final String ROUTING = "_routing";
public static final String PARENT = "_parent";
}
}
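After this change the update path derives only a routing value from the existing document and the incoming request; there is no parent to propagate into the resulting write. A rough sketch of the index request shape UpdateHelper now produces; the helper and its arguments are illustrative:

import java.util.Map;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.xcontent.XContentType;

public class UpdateResultRequestSketch {
    // Builds the write request an update resolves to: routing is carried through,
    // but there is no parent() call anymore.
    static IndexRequest toIndexRequest(String index, String type, String id,
                                       String routing, Map<String, Object> updatedSource) {
        return Requests.indexRequest(index)
                .type(type).id(id).routing(routing)
                .source(updatedSource, XContentType.JSON);
    }
}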

View File

@ -20,6 +20,8 @@
package org.elasticsearch.action.update;
import java.util.Arrays;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.index.IndexRequest;
@ -61,9 +63,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
@Nullable
private String routing;
@Nullable
private String parent;
@Nullable
Script script;
@ -194,18 +193,6 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
return this.routing;
}
/**
* The parent id is used for the upsert request.
*/
public UpdateRequest parent(String parent) {
this.parent = parent;
return this;
}
public String parent() {
return parent;
}
public ShardId getShardId() {
return this.shardId;
}
@ -790,7 +777,9 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
type = in.readString();
id = in.readString();
routing = in.readOptionalString();
parent = in.readOptionalString();
if (in.getVersion().before(Version.V_7_0_0_alpha1)) {
in.readOptionalString(); // _parent
}
if (in.readBoolean()) {
script = new Script(in);
}
@ -820,7 +809,10 @@ public class UpdateRequest extends InstanceShardOperationRequest<UpdateRequest>
out.writeString(type);
out.writeString(id);
out.writeOptionalString(routing);
out.writeOptionalString(parent);
if (out.getVersion().before(Version.V_7_0_0_alpha1)) {
out.writeOptionalString(null); // _parent
}
boolean hasScript = script != null;
out.writeBoolean(hasScript);
if (hasScript) {

View File

@ -74,11 +74,6 @@ public class UpdateRequestBuilder extends InstanceShardOperationRequestBuilder<U
return this;
}
public UpdateRequestBuilder setParent(String parent) {
request.parent(parent);
return this;
}
/**
* The script to execute. Note, make sure not to send different script each times and instead
* use script params if possible with the same (automatically compiled) script.

Some files were not shown because too many files have changed in this diff.