Fix compilation after upstream Elasticsearch changes (elastic/elasticsearch#621)

The Elasticsearch change that caused the breakage was 73625f6291.

Original commit: elastic/x-pack-elasticsearch@01b82a79d4
David Roberts authored on 2017-01-03 10:13:39 +00:00 (committed by GitHub)
parent ec8fb6c99f
commit ad9d65cfa5
6 changed files with 7 additions and 44 deletions
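
The hunks below all stem from the same upstream migration: the prototype-based pattern (a shared PROTO constant, an instance readFrom(StreamInput), and MetaData.registerPrototype) is gone, and objects are now read through a constructor that takes the StreamInput, handed to callers as a Writeable.Reader. A minimal sketch of that before/after shape, using made-up stand-in types (FakeStreamInput, FakeReader, ExampleCustom) rather than the real org.elasticsearch.common.io.stream classes:

import java.io.IOException;

// Stand-ins for Elasticsearch's StreamInput and Writeable.Reader, only to show the
// shape of the change; they are not the real org.elasticsearch.common.io.stream types.
interface FakeStreamInput {
    String readString() throws IOException;
}

@FunctionalInterface
interface FakeReader<T> {
    T read(FakeStreamInput in) throws IOException;
}

final class ExampleCustom {
    final String id;

    // New style: the object deserialises itself in a constructor that takes the stream,
    // mirroring PrelertMetadata(StreamInput in) and the new Allocation(in) / new Job(in)
    // calls in the diff below.
    ExampleCustom(FakeStreamInput in) throws IOException {
        this.id = in.readString();
    }

    // Old style (removed upstream): a shared prototype plus an instance readFrom(),
    // e.g.  ExampleCustom value = ExampleCustom.PROTO.readFrom(in);

    // Callers now supply the reader, typically as a constructor reference.
    static ExampleCustom readBack(FakeStreamInput in) throws IOException {
        FakeReader<ExampleCustom> reader = ExampleCustom::new;
        return reader.read(in);
    }
}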


@@ -137,10 +137,6 @@ public class PrelertPlugin extends Plugin implements ActionPlugin {
     private final ParseFieldMatcherSupplier parseFieldMatcherSupplier;

-    static {
-        MetaData.registerPrototype(PrelertMetadata.TYPE, PrelertMetadata.PROTO);
-    }
-
     public PrelertPlugin(Settings settings) {
         this.settings = settings;
         this.env = new Environment(settings);


@@ -23,7 +23,6 @@ import org.elasticsearch.xpack.prelert.job.messages.Messages;
import org.elasticsearch.xpack.prelert.job.transform.TransformConfig;
import org.elasticsearch.xpack.prelert.job.transform.TransformConfigs;
import org.elasticsearch.xpack.prelert.job.transform.verification.TransformConfigsVerifier;
import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;
import org.elasticsearch.xpack.prelert.utils.PrelertStrings;
import org.elasticsearch.xpack.prelert.utils.time.TimeUtils;
@@ -196,11 +195,6 @@ public class Job extends AbstractDiffable<Job> implements Writeable, ToXContent
         indexName = in.readString();
     }

-    @Override
-    public Job readFrom(StreamInput in) throws IOException {
-        return new Job(in);
-    }
-
     /**
      * Return the Job Id.
      *


@@ -87,11 +87,6 @@ public class Allocation extends AbstractDiffable<Allocation> implements ToXConte
         return statusReason;
     }

-    @Override
-    public Allocation readFrom(StreamInput in) throws IOException {
-        return new Allocation(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         out.writeOptionalString(nodeId);


@@ -12,7 +12,6 @@ import org.elasticsearch.cluster.DiffableUtils;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.common.Nullable;
 import org.elasticsearch.common.ParseField;
-import org.elasticsearch.common.ParseFieldMatcher;
 import org.elasticsearch.common.ParseFieldMatcherSupplier;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
@@ -20,7 +19,6 @@ import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.search.SearchRequestParsers;
 import org.elasticsearch.xpack.prelert.job.Job;
 import org.elasticsearch.xpack.prelert.job.JobStatus;
@@ -89,15 +87,10 @@ public class PrelertMetadata implements MetaData.Custom {
     }

     @Override
-    public String type() {
+    public String getWriteableName() {
         return TYPE;
     }

-    @Override
-    public MetaData.Custom fromXContent(XContentParser parser) throws IOException {
-        return PRELERT_METADATA_PARSER.parse(parser, () -> ParseFieldMatcher.STRICT).build();
-    }
-
     @Override
     public EnumSet<MetaData.XContentContext> context() {
         // NORELEASE: Also include SNAPSHOT, but then we need to split the allocations from here and add them
@@ -110,29 +103,25 @@ public class PrelertMetadata implements MetaData.Custom {
         return new PrelertMetadataDiff((PrelertMetadata) previousState, this);
     }

-    @Override
-    public Diff<MetaData.Custom> readDiffFrom(StreamInput in) throws IOException {
-        return new PrelertMetadataDiff(in);
-    }
-
-    @Override
-    public MetaData.Custom readFrom(StreamInput in) throws IOException {
+    public PrelertMetadata(StreamInput in) throws IOException {
         int size = in.readVInt();
         TreeMap<String, Job> jobs = new TreeMap<>();
         for (int i = 0; i < size; i++) {
             jobs.put(in.readString(), new Job(in));
         }
+        this.jobs = jobs;
         size = in.readVInt();
         TreeMap<String, Allocation> allocations = new TreeMap<>();
         for (int i = 0; i < size; i++) {
-            allocations.put(in.readString(), Allocation.PROTO.readFrom(in));
+            allocations.put(in.readString(), new Allocation(in));
         }
+        this.allocations = allocations;
         size = in.readVInt();
         TreeMap<String, Scheduler> schedulers = new TreeMap<>();
         for (int i = 0; i < size; i++) {
             schedulers.put(in.readString(), new Scheduler(in));
         }
-        return new PrelertMetadata(jobs, allocations, schedulers);
+        this.schedulers = schedulers;
     }

     @Override
@@ -179,12 +168,6 @@ public class PrelertMetadata implements MetaData.Custom {
             this.schedulers = DiffableUtils.diff(before.schedulers, after.schedulers, DiffableUtils.getStringKeySerializer());
         }

-        PrelertMetadataDiff(StreamInput in) throws IOException {
-            jobs = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), Job.PROTO);
-            allocations = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), Allocation.PROTO);
-            schedulers = DiffableUtils.readJdkMapDiff(in, DiffableUtils.getStringKeySerializer(), Scheduler.PROTO);
-        }
-
        @Override
        public MetaData.Custom apply(MetaData.Custom part) {
            TreeMap<String, Job> newJobs = new TreeMap<>(jobs.apply(((PrelertMetadata) part).jobs));


@@ -66,11 +66,6 @@ public class Scheduler extends AbstractDiffable<Scheduler> implements ToXContent
         return status;
     }

-    @Override
-    public Scheduler readFrom(StreamInput in) throws IOException {
-        return new Scheduler(in);
-    }
-
     @Override
     public void writeTo(StreamOutput out) throws IOException {
         config.writeTo(out);


@@ -68,7 +68,7 @@ public class PrelertMetadataTests extends AbstractSerializingTestCase<PrelertMet
     @Override
     protected Writeable.Reader<PrelertMetadata> instanceReader() {
-        return in -> (PrelertMetadata) PrelertMetadata.PROTO.readFrom(in);
+        return in -> new PrelertMetadata(in);
     }

     @Override
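
In the test hunk above, the lambda in -> new PrelertMetadata(in) is simply the Writeable.Reader shape written out; a constructor reference, return PrelertMetadata::new, would be an equivalent way to express the same reader.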