Fix reindex-from-remote for parent/child from <2.0
Versions before 2.0 need to be told explicitly to return the interesting metadata fields like `_parent`, `_routing`, `_ttl`, and `_timestamp`, and those values come back inside a `fields` block which we need to parse.

Closes #21044
parent 904dcc7127
commit 18393a06f3
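For orientation, here is a minimal standalone sketch of the request-side behaviour the diff below implements. This is not the Elasticsearch code itself; the class, method name, and the coarse major-version check are made up for illustration. Pre-2.0 remotes have to be asked explicitly for `_parent`, `_routing`, and `_ttl`, pre-5.0.0-alpha4 remotes take the field list under `fields` rather than `stored_fields`, and `_source` is always forced on:

    import java.util.LinkedHashMap;
    import java.util.Map;

    // Illustrative sketch only; names and the simplified version check are hypothetical.
    public class InitialSearchParamsSketch {
        static Map<String, String> paramsFor(int remoteMajorVersion, String requestedFields) {
            Map<String, String> params = new LinkedHashMap<>();
            String fields = requestedFields;
            if (remoteMajorVersion < 2) {
                // Pre-2.0 remotes only return these metadata fields when asked for them explicitly.
                fields = fields + ",_parent,_routing,_ttl";
            }
            // Pre-5.0.0-alpha4 remotes call the parameter "fields"; newer ones call it "stored_fields".
            String paramName = remoteMajorVersion < 5 ? "fields" : "stored_fields";
            params.put(paramName, fields);
            // Always force the _source document to be returned.
            params.put("_source", "true");
            return params;
        }

        public static void main(String[] args) {
            System.out.println(paramsFor(1, "_source,_id"));
            // {fields=_source,_id,_parent,_routing,_ttl, _source=true}
            System.out.println(paramsFor(5, "_source,_id"));
            // {stored_fields=_source,_id, _source=true}
        }
    }
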
@@ -89,6 +89,10 @@ final class RemoteRequestBuilders {
                 params.put("sort", sorts.toString());
             }
         }
+        if (remoteVersion.before(Version.V_2_0_0)) {
+            // Versions before 2.0.0 need prompting to return interesting fields. Note that timestamp isn't available at all....
+            searchRequest.source().storedField("_parent").storedField("_routing").storedField("_ttl");
+        }
         if (searchRequest.source().storedFields() != null && false == searchRequest.source().storedFields().fieldNames().isEmpty()) {
             StringBuilder fields = new StringBuilder(searchRequest.source().storedFields().fieldNames().get(0));
             for (int i = 1; i < searchRequest.source().storedFields().fieldNames().size(); i++) {
@@ -97,6 +101,8 @@ final class RemoteRequestBuilders {
             String storedFieldsParamName = remoteVersion.before(Version.V_5_0_0_alpha4) ? "fields" : "stored_fields";
             params.put(storedFieldsParamName, fields.toString());
         }
+        // We always want the _source document and this will force it to be returned.
+        params.put("_source", "true");
         return params;
     }

@@ -83,10 +83,28 @@ final class RemoteResponseParsers {
                 throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e);
             }
         }, new ParseField("_source"));
-        HIT_PARSER.declareString(BasicHit::setRouting, new ParseField("_routing"));
-        HIT_PARSER.declareString(BasicHit::setParent, new ParseField("_parent"));
-        HIT_PARSER.declareLong(BasicHit::setTTL, new ParseField("_ttl"));
+        ParseField routingField = new ParseField("_routing");
+        ParseField parentField = new ParseField("_parent");
+        ParseField ttlField = new ParseField("_ttl");
+        HIT_PARSER.declareString(BasicHit::setRouting, routingField);
+        HIT_PARSER.declareString(BasicHit::setParent, parentField);
+        HIT_PARSER.declareLong(BasicHit::setTTL, ttlField);
         HIT_PARSER.declareLong(BasicHit::setTimestamp, new ParseField("_timestamp"));
+        // Pre-2.0.0 parent and routing come back in "fields"
+        class Fields {
+            String routing;
+            String parent;
+            long ttl;
+        }
+        ObjectParser<Fields, ParseFieldMatcherSupplier> fieldsParser = new ObjectParser<>("fields", Fields::new);
+        HIT_PARSER.declareObject((hit, fields) -> {
+            hit.setRouting(fields.routing);
+            hit.setParent(fields.parent);
+            hit.setTTL(fields.ttl);
+        }, fieldsParser, new ParseField("fields"));
+        fieldsParser.declareString((fields, routing) -> fields.routing = routing, routingField);
+        fieldsParser.declareString((fields, parent) -> fields.parent = parent, parentField);
+        fieldsParser.declareLong((fields, ttl) -> fields.ttl = ttl, ttlField);
     }

     /**
@@ -113,7 +113,7 @@ public class RemoteRequestBuildersTests extends ESTestCase {
         SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder());

         // Test request without any fields
-        Version remoteVersion = Version.fromId(between(0, Version.CURRENT.id));
+        Version remoteVersion = Version.fromId(between(Version.V_2_0_0_beta1_ID, Version.CURRENT.id));
         assertThat(initialSearchParams(searchRequest, remoteVersion),
                 not(either(hasKey("stored_fields")).or(hasKey("fields"))));

@@ -125,8 +125,12 @@ public class RemoteRequestBuildersTests extends ESTestCase {
         assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("stored_fields", "_source,_id"));

         // Test fields for versions that support it
-        remoteVersion = Version.fromId(between(0, Version.V_5_0_0_alpha4_ID - 1));
+        remoteVersion = Version.fromId(between(Version.V_2_0_0_beta1_ID, Version.V_5_0_0_alpha4_ID - 1));
         assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("fields", "_source,_id"));
+
+        // Test extra fields for versions that need it
+        remoteVersion = Version.fromId(between(0, Version.V_2_0_0_beta1_ID - 1));
+        assertThat(initialSearchParams(searchRequest, remoteVersion), hasEntry("fields", "_source,_id,_parent,_routing,_ttl"));
     }

     public void testInitialSearchParamsMisc() {
@@ -151,6 +155,7 @@ public class RemoteRequestBuildersTests extends ESTestCase {
         assertThat(params, scroll == null ? not(hasKey("scroll")) : hasEntry("scroll", scroll.toString()));
         assertThat(params, hasEntry("size", Integer.toString(size)));
         assertThat(params, fetchVersion == null || fetchVersion == true ? hasEntry("version", null) : not(hasEntry("version", null)));
+        assertThat(params, hasEntry("_source", "true"));
     }

     public void testInitialSearchEntity() throws IOException {
@@ -192,7 +192,7 @@ public class RemoteScrollableHitSourceTests extends ESTestCase {
     }

     /**
-     * Test for parsing _ttl, _timestamp, and _routing.
+     * Test for parsing _ttl, _timestamp, _routing, and _parent.
      */
     public void testParseScrollFullyLoaded() throws Exception {
         AtomicBoolean called = new AtomicBoolean();
@@ -208,6 +208,24 @@ public class RemoteScrollableHitSourceTests extends ESTestCase {
         assertTrue(called.get());
     }

+    /**
+     * Test for parsing _ttl, _routing, and _parent. _timestamp isn't available.
+     */
+    public void testParseScrollFullyLoadedFrom1_7() throws Exception {
+        AtomicBoolean called = new AtomicBoolean();
+        sourceWithMockedRemoteCall("scroll_fully_loaded_1_7.json").doStartNextScroll("", timeValueMillis(0), r -> {
+            assertEquals("AVToMiDL50DjIiBO3yKA", r.getHits().get(0).getId());
+            assertEquals("{\"test\":\"test3\"}", r.getHits().get(0).getSource().utf8ToString());
+            assertEquals((Long) 1234L, r.getHits().get(0).getTTL());
+            assertNull(r.getHits().get(0).getTimestamp()); // Not available from 1.7
+            assertEquals("testrouting", r.getHits().get(0).getRouting());
+            assertEquals("testparent", r.getHits().get(0).getParent());
+            called.set(true);
+        });
+        assertTrue(called.get());
+    }
+
     /**
      * Versions of Elasticsearch before 2.1.0 don't support sort:_doc and instead need to use search_type=scan. Scan doesn't return
      * documents the first iteration but reindex doesn't like that. So we jump start strait to the next iteration.
@@ -0,0 +1,31 @@
+{
+  "_scroll_id" : "DnF1ZXJ5VGhlbkZldGNoBQAAAfakescroll",
+  "took" : 3,
+  "timed_out" : false,
+  "terminated_early" : true,
+  "_shards" : {
+    "total" : 5,
+    "successful" : 5,
+    "failed" : 0
+  },
+  "hits" : {
+    "total" : 4,
+    "max_score" : null,
+    "hits" : [ {
+      "_index" : "test",
+      "_type" : "test",
+      "_id" : "AVToMiDL50DjIiBO3yKA",
+      "_version" : 1,
+      "_score" : null,
+      "_source" : {
+        "test" : "test3"
+      },
+      "sort" : [ 0 ],
+      "fields" : {
+        "_routing" : "testrouting",
+        "_ttl" : 1234,
+        "_parent" : "testparent"
+      }
+    } ]
+  }
+}