Merge pull request #17822: Use try-with-resource when creating new parser instances where possible
We should wrap newly created XContent parsers in a try-with-resources block so that they are properly closed after use.
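In code terms, the change replaces the create-and-maybe-close pattern with a try-with-resources block. A minimal sketch of the idea, not taken from this commit (the class and method names below are made up; XContentHelper.createParser and XContentParser are the real types touched in the diffs):

    import java.io.IOException;

    import org.elasticsearch.common.bytes.BytesReference;
    import org.elasticsearch.common.xcontent.XContentHelper;
    import org.elasticsearch.common.xcontent.XContentParser;

    class ParserClosingSketch {
        // XContentParser is Closeable, so try-with-resources guarantees that close()
        // runs even if parsing throws, instead of relying on an explicit
        // parser.close() call that may be skipped on the exception path.
        void consume(BytesReference source) throws IOException {
            try (XContentParser parser = XContentHelper.createParser(source)) {
                parser.nextToken(); // move to the first token
                // ... read the remaining tokens ...
            }
        }
    }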
commit 36622ecb59
@@ -21,7 +21,6 @@ package org.elasticsearch.action.admin.indices.create;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.ActionRequest;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.admin.indices.alias.Alias;
@@ -49,9 +48,9 @@ import java.util.Map;
import java.util.Set;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;

/**
 * A request to create an index. Best created with {@link org.elasticsearch.client.Requests#createIndexRequest(String)}.
@@ -305,8 +304,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
     * Sets the aliases that will be associated with the index when it gets created
     */
    public CreateIndexRequest aliases(BytesReference source) {
-       try {
-           XContentParser parser = XContentHelper.createParser(source);
+       try (XContentParser parser = XContentHelper.createParser(source)) {
            //move to the first alias
            parser.nextToken();
            while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -47,9 +47,9 @@ import java.util.Map;
import java.util.Set;

import static org.elasticsearch.action.ValidateActions.addValidationError;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;

/**
 * A request to create an index template.
@@ -393,8 +393,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
     * Sets the aliases that will be associated with the index when it gets created
     */
    public PutIndexTemplateRequest aliases(BytesReference source) {
-       try {
-           XContentParser parser = XContentHelper.createParser(source);
+       try (XContentParser parser = XContentHelper.createParser(source)) {
            //move to the first alias
            parser.nextToken();
            while ((parser.nextToken()) != XContentParser.Token.END_OBJECT) {
@@ -22,6 +22,7 @@ package org.elasticsearch.cluster;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.cluster.block.ClusterBlock;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
@@ -440,8 +441,10 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
                builder.startObject("mappings");
                for (ObjectObjectCursor<String, CompressedXContent> cursor1 : templateMetaData.mappings()) {
                    byte[] mappingSource = cursor1.value.uncompressed();
-                   XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
-                   Map<String, Object> mapping = parser.map();
+                   Map<String, Object> mapping;
+                   try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {
+                       mapping = parser.map();
+                   }
                    if (mapping.size() == 1 && mapping.containsKey(cursor1.key)) {
                        // the type name is the root value, reduce it
                        mapping = (Map<String, Object>) mapping.get(cursor1.key);
@@ -470,8 +473,10 @@ public class ClusterState implements ToXContent, Diffable<ClusterState> {
                builder.startObject("mappings");
                for (ObjectObjectCursor<String, MappingMetaData> cursor : indexMetaData.getMappings()) {
                    byte[] mappingSource = cursor.value.source().uncompressed();
-                   XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
-                   Map<String, Object> mapping = parser.map();
+                   Map<String, Object> mapping;
+                   try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {
+                       mapping = parser.map();
+                   }
                    if (mapping.size() == 1 && mapping.containsKey(cursor.key)) {
                        // the type name is the root value, reduce it
                        mapping = (Map<String, Object>) mapping.get(cursor.key);
@@ -290,10 +290,10 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
                builder.field("filter", aliasMetaData.filter.compressed());
            } else {
                byte[] data = aliasMetaData.filter().uncompressed();
-               XContentParser parser = XContentFactory.xContent(data).createParser(data);
-               Map<String, Object> filter = parser.mapOrdered();
-               parser.close();
-               builder.field("filter", filter);
+               try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
+                   Map<String, Object> filter = parser.mapOrdered();
+                   builder.field("filter", filter);
+               }
            }
        }
        if (aliasMetaData.indexRouting() != null) {
@@ -118,8 +118,7 @@ public class AliasValidator extends AbstractComponent {
     */
    public void validateAliasFilter(String alias, String filter, QueryShardContext queryShardContext) {
        assert queryShardContext != null;
-       try {
-           XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
+       try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) {
            validateAliasFilter(parser, queryShardContext);
        } catch (Throwable e) {
            throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
@@ -133,8 +132,7 @@ public class AliasValidator extends AbstractComponent {
     */
    public void validateAliasFilter(String alias, byte[] filter, QueryShardContext queryShardContext) {
        assert queryShardContext != null;
-       try {
-           XContentParser parser = XContentFactory.xContent(filter).createParser(filter);
+       try (XContentParser parser = XContentFactory.xContent(filter).createParser(filter)) {
            validateAliasFilter(parser, queryShardContext);
        } catch (Throwable e) {
            throw new IllegalArgumentException("failed to parse filter for alias [" + alias + "]", e);
@@ -23,6 +23,7 @@ import com.carrotsearch.hppc.LongArrayList;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.Diffable;
@@ -927,10 +928,10 @@ public class IndexMetaData implements Diffable<IndexMetaData>, FromXContentBuild
                builder.value(cursor.value.source().compressed());
            } else {
                byte[] data = cursor.value.source().uncompressed();
-               XContentParser parser = XContentFactory.xContent(data).createParser(data);
-               Map<String, Object> mapping = parser.mapOrdered();
-               parser.close();
-               builder.map(mapping);
+               try (XContentParser parser = XContentFactory.xContent(data).createParser(data)) {
+                   Map<String, Object> mapping = parser.mapOrdered();
+                   builder.map(mapping);
+               }
            }
        }
        builder.endArray();
@@ -20,6 +20,7 @@ package org.elasticsearch.cluster.metadata;

import com.carrotsearch.hppc.cursors.ObjectCursor;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;

import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder;
@@ -329,8 +330,10 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
                builder.startObject("mappings");
                for (ObjectObjectCursor<String, CompressedXContent> cursor : indexTemplateMetaData.mappings()) {
                    byte[] mappingSource = cursor.value.uncompressed();
-                   XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource);
-                   Map<String, Object> mapping = parser.map();
+                   Map<String, Object> mapping;
+                   try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {
+                       mapping = parser.map();
+                   }
                    if (mapping.size() == 1 && mapping.containsKey(cursor.key)) {
                        // the type name is the root value, reduce it
                        mapping = (Map<String, Object>) mapping.get(cursor.key);
@@ -295,15 +295,15 @@ public abstract class MetaDataStateFormat<T> {
            try {
                final Path stateFile = pathAndStateId.file;
                final long id = pathAndStateId.id;
-               final XContentParser parser;
                if (pathAndStateId.legacy) { // read the legacy format -- plain XContent
                    final byte[] data = Files.readAllBytes(stateFile);
                    if (data.length == 0) {
                        logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
                        continue;
                    }
-                   parser = XContentHelper.createParser(new BytesArray(data));
-                   state = fromXContent(parser);
+                   try (final XContentParser parser = XContentHelper.createParser(new BytesArray(data))) {
+                       state = fromXContent(parser);
+                   }
                    if (state == null) {
                        logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath());
                    }
@@ -382,9 +382,7 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
        String[] pathElements = Strings.splitStringToArray(path, '.');
        int currentPathSlot = 0;

-       XContentParser parser = null;
-       try {
-           parser = XContentHelper.createParser(response.getSourceAsBytesRef());
+       try (XContentParser parser = XContentHelper.createParser(response.getSourceAsBytesRef())) {
            XContentParser.Token currentToken;
            while ((currentToken = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
                if (currentToken == XContentParser.Token.FIELD_NAME) {
@@ -400,10 +398,6 @@ public class GeoShapeQueryBuilder extends AbstractQueryBuilder<GeoShapeQueryBuil
                }
            }
            throw new IllegalStateException("Shape with name [" + getRequest.id() + "] found but missing " + path + " field");
-       } finally {
-           if (parser != null) {
-               parser.close();
-           }
        }
    }

@@ -425,10 +425,11 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
            if (contentType == builder.contentType()) {
                builder.rawField(Field.DOC.getPreferredName(), this.doc);
            } else {
-               XContentParser parser = XContentFactory.xContent(contentType).createParser(this.doc);
-               parser.nextToken();
-               builder.field(Field.DOC.getPreferredName());
-               builder.copyCurrentStructure(parser);
+               try (XContentParser parser = XContentFactory.xContent(contentType).createParser(this.doc)) {
+                   parser.nextToken();
+                   builder.field(Field.DOC.getPreferredName());
+                   builder.copyCurrentStructure(parser);
+               }
            }
        }
        if (this.fields != null) {
@@ -177,10 +177,11 @@ public class PercolatorQueryBuilder extends AbstractQueryBuilder<PercolatorQuery
            if (contentType == builder.contentType()) {
                builder.rawField(DOCUMENT_FIELD.getPreferredName(), document);
            } else {
-               XContentParser parser = XContentFactory.xContent(contentType).createParser(document);
-               parser.nextToken();
-               builder.field(DOCUMENT_FIELD.getPreferredName());
-               builder.copyCurrentStructure(parser);
+               try (XContentParser parser = XContentFactory.xContent(contentType).createParser(document)) {
+                   parser.nextToken();
+                   builder.field(DOCUMENT_FIELD.getPreferredName());
+                   builder.copyCurrentStructure(parser);
+               }
            }
        }
        if (indexedDocumentIndex != null || indexedDocumentType != null || indexedDocumentId != null) {
@@ -148,8 +148,9 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
    public void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(getName());
        builder.field(fieldName);
-       XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes);
-       builder.copyCurrentStructure(parser);
+       try (XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes)) {
+           builder.copyCurrentStructure(parser);
+       }
        builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name());
        builder.endObject();
    }
@@ -181,8 +182,11 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>

    @Override
    protected ScoreFunction doToFunction(QueryShardContext context) throws IOException {
-       XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes);
-       return parseVariable(fieldName, parser, context, multiValueMode);
+       AbstractDistanceScoreFunction scoreFunction;
+       try (XContentParser parser = XContentFactory.xContent(functionBytes).createParser(functionBytes)) {
+           scoreFunction = parseVariable(fieldName, parser, context, multiValueMode);
+       }
+       return scoreFunction;
    }

    /**
@@ -63,35 +63,40 @@ public class RestRenderSearchTemplateAction extends BaseRestHandler {
    protected void handleRequest(RestRequest request, RestChannel channel, Client client) throws Exception {
        RenderSearchTemplateRequest renderSearchTemplateRequest;
        BytesReference source = RestActions.getRestContent(request);
-       XContentParser parser = XContentFactory.xContent(source).createParser(source);
-       String templateId = request.param("id");
-       final Template template;
-       if (templateId == null) {
-           template = Template.parse(parser, parseFieldMatcher);
-       } else {
-           Map<String, Object> params = null;
-           String currentFieldName = null;
-           XContentParser.Token token = parser.nextToken();
-           if (token != XContentParser.Token.START_OBJECT) {
-               throw new ElasticsearchParseException("failed to parse request. request body must be an object but found [{}] instead", token);
-           }
-           while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-               if (token == XContentParser.Token.FIELD_NAME) {
-                   currentFieldName = parser.currentName();
-               } else if (parseFieldMatcher.match(currentFieldName, ScriptField.PARAMS)) {
-                   if (token == XContentParser.Token.START_OBJECT) {
-                       params = parser.map();
-                   } else {
-                       throw new ElasticsearchParseException("failed to parse request. field [{}] is expected to be an object, but found [{}] instead", currentFieldName, token);
-                   }
-               } else {
-                   throw new ElasticsearchParseException("failed to parse request. unknown field [{}] of type [{}]", currentFieldName, token);
-               }
-           }
-           template = new Template(templateId, ScriptType.INDEXED, Template.DEFAULT_LANG, null, params);
-       }
-       renderSearchTemplateRequest = new RenderSearchTemplateRequest();
-       renderSearchTemplateRequest.template(template);
+       try (XContentParser parser = XContentFactory.xContent(source).createParser(source)) {
+           String templateId = request.param("id");
+           final Template template;
+           if (templateId == null) {
+               template = Template.parse(parser, parseFieldMatcher);
+           } else {
+               Map<String, Object> params = null;
+               String currentFieldName = null;
+               XContentParser.Token token = parser.nextToken();
+               if (token != XContentParser.Token.START_OBJECT) {
+                   throw new ElasticsearchParseException("failed to parse request. request body must be an object but found [{}] instead",
+                           token);
+               }
+               while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                   if (token == XContentParser.Token.FIELD_NAME) {
+                       currentFieldName = parser.currentName();
+                   } else if (parseFieldMatcher.match(currentFieldName, ScriptField.PARAMS)) {
+                       if (token == XContentParser.Token.START_OBJECT) {
+                           params = parser.map();
+                       } else {
+                           throw new ElasticsearchParseException(
+                                   "failed to parse request. field [{}] is expected to be an object, but found [{}] instead",
+                                   currentFieldName, token);
+                       }
+                   } else {
+                       throw new ElasticsearchParseException("failed to parse request. unknown field [{}] of type [{}]", currentFieldName,
+                               token);
+                   }
+               }
+               template = new Template(templateId, ScriptType.INDEXED, Template.DEFAULT_LANG, null, params);
+           }
+           renderSearchTemplateRequest = new RenderSearchTemplateRequest();
+           renderSearchTemplateRequest.template(template);
+       }
        client.admin().cluster().renderSearchTemplate(renderSearchTemplateRequest, new RestBuilderListener<RenderSearchTemplateResponse>(channel) {

            @Override
@@ -361,8 +361,7 @@ public class ScriptService extends AbstractComponent implements Closeable {
    }

    private void validate(BytesReference scriptBytes, String scriptLang) {
-       try {
-           XContentParser parser = XContentFactory.xContent(scriptBytes).createParser(scriptBytes);
+       try (XContentParser parser = XContentFactory.xContent(scriptBytes).createParser(scriptBytes)) {
            parser.nextToken();
            Template template = TemplateQueryBuilder.parse(scriptLang, parser, parseFieldMatcher, "params", "script", "template");
            if (Strings.hasLength(template.getScript())) {
@@ -811,6 +811,10 @@ public class SearchService extends AbstractLifecycleComponent<SearchService> imp
                }
                XContentLocation location = extParser != null ? extParser.getTokenLocation() : null;
                throw new SearchParseException(context, "failed to parse ext source [" + sSource + "]", location, e);
+           } finally {
+               if (extParser != null) {
+                   extParser.close();
+               }
            }
        }
        if (source.version() != null) {
@@ -1257,9 +1257,10 @@ public final class SearchSourceBuilder extends ToXContentToBytes implements Writ

        if (ext != null) {
            builder.field(EXT_FIELD.getPreferredName());
-           XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(ext);
-           parser.nextToken();
-           builder.copyCurrentStructure(parser);
+           try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(ext)) {
+               parser.nextToken();
+               builder.copyCurrentStructure(parser);
+           }
        }
    }

@@ -255,9 +255,10 @@ public class CompletionSuggestionBuilder extends SuggestionBuilder<CompletionSug
            regexOptions.toXContent(builder, params);
        }
        if (contextBytes != null) {
-           XContentParser contextParser = XContentFactory.xContent(XContentType.JSON).createParser(contextBytes);
-           builder.field(CONTEXTS_FIELD.getPreferredName());
-           builder.copyCurrentStructure(contextParser);
+           try (XContentParser contextParser = XContentFactory.xContent(XContentType.JSON).createParser(contextBytes)) {
+               builder.field(CONTEXTS_FIELD.getPreferredName());
+               builder.copyCurrentStructure(contextParser);
+           }
        }
        return builder;
    }
@@ -19,14 +19,6 @@

package org.elasticsearch.index.reindex;

-import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
-import static org.elasticsearch.rest.RestRequest.Method.POST;
-import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
import org.elasticsearch.action.WriteConsistencyLevel;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest;
@@ -55,6 +47,14 @@ import org.elasticsearch.script.Script;
import org.elasticsearch.search.aggregations.AggregatorParsers;
import org.elasticsearch.search.suggest.Suggesters;

+import java.io.IOException;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.common.unit.TimeValue.parseTimeValue;
+import static org.elasticsearch.rest.RestRequest.Method.POST;
+import static org.elasticsearch.rest.RestStatus.BAD_REQUEST;
+
/**
 * Expose IndexBySearchRequest over rest.
 */
@@ -77,8 +77,9 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
            }
            XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
            builder.map(source);
-           parser = parser.contentType().xContent().createParser(builder.bytes());
-           search.source().parseXContent(context.queryParseContext(parser), context.aggParsers, context.suggesters);
+           try (XContentParser innerParser = parser.contentType().xContent().createParser(builder.bytes())) {
+               search.source().parseXContent(context.queryParseContext(innerParser), context.aggParsers, context.suggesters);
+           }
        };

        ObjectParser<IndexRequest, ParseFieldMatcherSupplier> destParser = new ObjectParser<>("dest");
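Several hunks above use the same variation of the pattern when the parsed value is still needed after the parser is closed: declare the result before the try-with-resources block and assign it inside. A minimal sketch of that variation (the MappingReadSketch class and readMapping method are made up; XContentFactory.xContent(byte[]), createParser(byte[]) and parser.map() are the calls used in the hunks above):

    import java.io.IOException;
    import java.util.Map;

    import org.elasticsearch.common.xcontent.XContentFactory;
    import org.elasticsearch.common.xcontent.XContentParser;

    class MappingReadSketch {
        // The result is declared outside the try so it remains in scope after the
        // parser is closed; the assignment happens while the parser is still open.
        Map<String, Object> readMapping(byte[] mappingSource) throws IOException {
            Map<String, Object> mapping;
            try (XContentParser parser = XContentFactory.xContent(mappingSource).createParser(mappingSource)) {
                mapping = parser.map();
            }
            return mapping;
        }
    }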