Replace manual parsing logic with ObjectParser

Martijn van Groningen 2016-01-20 15:03:58 +01:00
parent 5eaaa95c61
commit 136e0fe1af
2 changed files with 56 additions and 50 deletions
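
Both files below swap a hand-rolled XContentParser token loop for ObjectParser. For orientation, here is a minimal sketch of that pattern; the MyConfig class and its "name" field are illustrative only and are not part of this commit — only the ObjectParser calls mirror the ones used in the diff (two-arg constructor, declareString, parse(parser, target)).

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

// Hypothetical example class, not from this commit.
public class MyConfig {

    // Declare the expected fields once on a shared parser; ObjectParser owns the token loop.
    private static final ObjectParser<MyConfig, Void> PARSER = new ObjectParser<>("my_config", null);
    static {
        PARSER.declareString(MyConfig::setName, new ParseField("name"));
    }

    private String name;

    public void setName(String name) {
        this.name = name;
    }

    public static MyConfig fromXContent(XContentParser parser) throws IOException {
        MyConfig config = new MyConfig();
        PARSER.parse(parser, config); // drives the XContentParser and invokes the declared setters
        return config;
    }
}
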

org/elasticsearch/ingest/IngestMetadata.java

@@ -21,8 +21,11 @@ package org.elasticsearch.ingest;
import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.collect.HppcMaps;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -39,6 +42,7 @@ public final class IngestMetadata extends AbstractDiffable<MetaData.Custom> impl
    public final static String TYPE = "ingest";
    public final static IngestMetadata PROTO = new IngestMetadata();
+    private final ParseField PIPELINES_FIELD = new ParseField("pipeline");
    // We can't use Pipeline class directly in cluster state, because we don't have the processor factories around when
    // IngestMetadata is registered as custom metadata.
@@ -82,33 +86,26 @@ public final class IngestMetadata extends AbstractDiffable<MetaData.Custom> impl
    @Override
    public MetaData.Custom fromXContent(XContentParser parser) throws IOException {
-        XContentParser.Token token;
-        String currentFieldName = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            switch (token) {
-                case FIELD_NAME:
-                    currentFieldName = parser.currentName();
-                    break;
-                case START_ARRAY:
-                    if ("pipelines".equals(currentFieldName)) {
-                        Map<String, PipelineConfiguration> pipelines = new HashMap<>();
-                        while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) {
-                            if (token == XContentParser.Token.START_OBJECT) {
-                                PipelineConfiguration pipeline = new PipelineConfiguration(parser);
-                                pipelines.put(pipeline.getId(), pipeline);
-                            }
-                        }
-                        return new IngestMetadata(pipelines);
-                    }
-                    break;
-            }
-        }
-        return PROTO;
+        ObjectParser<Void, Void> ingestMetaDataParser = new ObjectParser<>("ingest_metadata", null);
+        Map<String, PipelineConfiguration> pipelines = new HashMap<>();
+        ingestMetaDataParser.declareField((parser1, aVoid, aVoid2) -> {
+            XContentParser.Token token;
+            while ((token = parser1.nextToken()) != XContentParser.Token.END_ARRAY) {
+                if (token == XContentParser.Token.START_OBJECT) {
+                    PipelineConfiguration pipeline = new PipelineConfiguration.Builder(parser1).build();
+                    pipelines.put(pipeline.getId(), pipeline);
+                }
+            }
+        }, PIPELINES_FIELD, ObjectParser.ValueType.OBJECT);
+        ingestMetaDataParser.parse(parser);
+        return new IngestMetadata(pipelines);
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startArray("pipelines");
+        builder.startArray(PIPELINES_FIELD.getPreferredName());
        for (PipelineConfiguration pipeline : pipelines.values()) {
            pipeline.toXContent(builder, params);
        }

org/elasticsearch/ingest/PipelineConfiguration.java

@@ -19,10 +19,14 @@
package org.elasticsearch.ingest;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Build;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
@@ -43,40 +47,45 @@ public final class PipelineConfiguration implements Writeable<PipelineConfigurat
        return PROTOTYPE.readFrom(in);
    }

+    public static class Builder {
+
+        private final static ObjectParser<Builder, Void> PARSER = new ObjectParser<>("pipeline_config", null);
+
+        static {
+            PARSER.declareString(Builder::setId, new ParseField("id"));
+            PARSER.declareField((parser, builder, aVoid) -> {
+                XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent());
+                XContentHelper.copyCurrentEvent(contentBuilder.generator(), parser);
+                builder.setConfig(contentBuilder.bytes());
+            }, new ParseField("config"), ObjectParser.ValueType.OBJECT);
+        }
+
+        private String id;
+        private BytesReference config;
+
+        public Builder(XContentParser parser) throws IOException {
+            PARSER.parse(parser, this);
+        }
+
+        public void setId(String id) {
+            this.id = id;
+        }
+
+        public void setConfig(BytesReference config) {
+            this.config = config;
+        }
+
+        public PipelineConfiguration build() {
+            return new PipelineConfiguration(id, config);
+        }
+    }

    private final String id;
    // Store config as bytes reference, because the config is only used when the pipeline store reads the cluster state
    // and the way the map of maps config is read requires a deep copy (it removes instead of gets entries to check for unused options)
    // also the get pipeline api just directly returns this to the caller
    private final BytesReference config;

-    PipelineConfiguration(XContentParser parser) throws IOException {
-        String id = null;
-        BytesReference config = null;
-        XContentParser.Token token;
-        String currentFieldName = null;
-        while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
-            switch (token) {
-                case FIELD_NAME:
-                    currentFieldName = parser.currentName();
-                    break;
-                case VALUE_STRING:
-                    if ("id".equals(currentFieldName)) {
-                        id = parser.text();
-                    }
-                    break;
-                case START_OBJECT:
-                    XContentBuilder builder = XContentBuilder.builder(parser.contentType().xContent());
-                    XContentHelper.copyCurrentEvent(builder.generator(), parser);
-                    config = builder.bytes();
-                    break;
-            }
-        }
-        this.id = Objects.requireNonNull(id);
-        this.config = Objects.requireNonNull(config);
-    }

    public PipelineConfiguration(String id, BytesReference config) {
        this.id = id;
        this.config = config;
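
The comment kept in PipelineConfiguration above notes that the pipeline config is held as a BytesReference because readers need a fresh map-of-maps copy. As a hedged illustration of the reading side, here is a hypothetical helper that is not part of this commit; it assumes XContentHelper.convertToMap, which returns a (content type, map) tuple.

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;

import java.util.Map;

final class PipelineConfigReader {

    // Hypothetical helper: decode stored config bytes back into a map of maps,
    // the shape a consumer such as the pipeline store works with.
    static Map<String, Object> configAsMap(BytesReference config) {
        return XContentHelper.convertToMap(config, true).v2();
    }
}
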