fixed compile errors due to upstream changes in elasticsearch itself

Original commit: elastic/x-pack-elasticsearch@defb853486
This commit is contained in:
Martijn van Groningen 2016-12-22 11:12:12 +01:00
parent 92def19a73
commit 84b419052f
41 changed files with 114 additions and 160 deletions

View File

@@ -17,6 +17,7 @@ import org.elasticsearch.common.component.LifecycleListener;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.ActionPlugin;
import org.elasticsearch.plugins.Plugin;
@@ -161,9 +162,8 @@ public class PrelertPlugin extends Plugin implements ActionPlugin {
@Override
public Collection<Object> createComponents(Client client, ClusterService clusterService, ThreadPool threadPool,
ResourceWatcherService resourceWatcherService, ScriptService scriptService,
SearchRequestParsers searchRequestParsers) {
ResourceWatcherService resourceWatcherService, ScriptService scriptService,
SearchRequestParsers searchRequestParsers, NamedXContentRegistry xContentRegistry) {
JobResultsPersister jobResultsPersister = new JobResultsPersister(settings, client);
JobProvider jobProvider = new JobProvider(client, 0, parseFieldMatcherSupplier.getParseFieldMatcher());
JobRenormalizedResultsPersister jobRenormalizedResultsPersister = new JobRenormalizedResultsPersister(settings,

View File

@@ -14,7 +14,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.block.ClusterBlockException;
@@ -37,6 +36,7 @@ import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
import java.util.function.Predicate;
public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobAction.Response, CloseJobAction.RequestBuilder> {
@@ -200,7 +200,7 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
}, new JobClosedChangePredicate(jobId), TimeValue.timeValueMinutes(30));
}
private class JobClosedChangePredicate implements ClusterStateObserver.ChangePredicate {
private class JobClosedChangePredicate implements Predicate<ClusterState> {
private final String jobId;
@@ -209,17 +209,7 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
}
@Override
public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, ClusterState newState,
ClusterState.ClusterStateStatus newStatus) {
return apply(newState);
}
@Override
public boolean apply(ClusterChangedEvent changedEvent) {
return apply(changedEvent.state());
}
boolean apply(ClusterState newState) {
public boolean test(ClusterState newState) {
PrelertMetadata metadata = newState.getMetaData().custom(PrelertMetadata.TYPE);
if (metadata != null) {
Allocation allocation = metadata.getAllocations().get(jobId);
@@ -227,6 +217,7 @@ public class CloseJobAction extends Action<CloseJobAction.Request, CloseJobActio
}
return false;
}
}
@Override
protected ClusterBlockException checkBlock(Request request, ClusterState state) {

View File

@@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.StatusToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@@ -279,7 +280,8 @@ public class GetListAction extends Action<GetListAction.Request, GetListAction.R
QueryPage<ListDocument> responseBody;
if (getDocResponse.isExists()) {
BytesReference docSource = getDocResponse.getSourceAsBytesRef();
XContentParser parser = XContentFactory.xContent(docSource).createParser(docSource);
XContentParser parser =
XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource);
ListDocument listDocument = ListDocument.PARSER.apply(parser, () -> parseFieldMatcher);
responseBody = new QueryPage<>(Collections.singletonList(listDocument), 1, ListDocument.RESULTS_FIELD);
@@ -319,7 +321,8 @@ public class GetListAction extends Action<GetListAction.Request, GetListAction.R
List<ListDocument> docs = new ArrayList<>(response.getHits().hits().length);
for (SearchHit hit : response.getHits().getHits()) {
BytesReference docSource = hit.sourceRef();
XContentParser parser = XContentFactory.xContent(docSource).createParser(docSource);
XContentParser parser =
XContentFactory.xContent(docSource).createParser(NamedXContentRegistry.EMPTY, docSource);
docs.add(ListDocument.PARSER.apply(parser, () -> parseFieldMatcher));
}

View File

@@ -15,7 +15,6 @@ import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.action.support.master.TransportMasterNodeAction;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.block.ClusterBlockException;
@@ -40,6 +39,7 @@ import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;
import java.io.IOException;
import java.util.Objects;
import java.util.function.Predicate;
public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.Response, OpenJobAction.RequestBuilder> {
@@ -237,7 +237,7 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
}, new JobOpenedChangePredicate(request.getJobId()), request.openTimeout);
}
private class JobOpenedChangePredicate implements ClusterStateObserver.ChangePredicate {
private class JobOpenedChangePredicate implements Predicate<ClusterState> {
private final String jobId;
@@ -246,17 +246,7 @@ public class OpenJobAction extends Action<OpenJobAction.Request, OpenJobAction.R
}
@Override
public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, ClusterState newState,
ClusterState.ClusterStateStatus newStatus) {
return apply(newState);
}
@Override
public boolean apply(ClusterChangedEvent changedEvent) {
return apply(changedEvent.state());
}
boolean apply(ClusterState newState) {
public boolean test(ClusterState newState) {
PrelertMetadata metadata = newState.getMetaData().custom(PrelertMetadata.TYPE);
if (metadata != null) {
Allocation allocation = metadata.getAllocations().get(jobId);

View File

@@ -15,6 +15,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.CompositeBytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -167,7 +168,7 @@ public class CppLogMessageHandler implements Closeable {
private void parseMessage(XContent xContent, BytesReference bytesRef) {
try {
XContentParser parser = xContent.createParser(bytesRef);
XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, bytesRef);
CppLogMessage msg = CppLogMessage.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
Level level = Level.getLevel(msg.getLevel());
if (level == null) {

View File

@@ -29,7 +29,7 @@ public class JobAllocator extends AbstractComponent implements ClusterStateListe
super(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
clusterService.add(this);
clusterService.addListener(this);
}
ClusterState assignJobsToNodes(ClusterState current) {

View File

@@ -34,7 +34,7 @@ public class JobLifeCycleService extends AbstractComponent implements ClusterSta
public JobLifeCycleService(Settings settings, Client client, ClusterService clusterService, DataProcessor dataProcessor,
Executor executor) {
super(settings);
clusterService.add(this);
clusterService.addListener(this);
this.client = Objects.requireNonNull(client);
this.dataProcessor = Objects.requireNonNull(dataProcessor);
this.executor = Objects.requireNonNull(executor);

View File

@@ -30,7 +30,7 @@ public class PrelertInitializationService extends AbstractComponent implements C
this.threadPool = threadPool;
this.clusterService = clusterService;
this.jobProvider = jobProvider;
clusterService.add(this);
clusterService.addListener(this);
}
@Override

View File

@@ -9,14 +9,14 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.prelert.job.results.Bucket;
import java.io.IOException;
import org.elasticsearch.xpack.prelert.job.results.Bucket;
class ElasticsearchBatchedBucketsIterator extends ElasticsearchBatchedResultsIterator<Bucket> {
public ElasticsearchBatchedBucketsIterator(Client client, String jobId, ParseFieldMatcher parseFieldMatcher) {
@@ -28,7 +28,7 @@ class ElasticsearchBatchedBucketsIterator extends ElasticsearchBatchedResultsIte
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse bucket", e);
}

View File

@@ -9,14 +9,14 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.prelert.job.results.Influencer;
import java.io.IOException;
import org.elasticsearch.xpack.prelert.job.results.Influencer;
class ElasticsearchBatchedInfluencersIterator extends ElasticsearchBatchedResultsIterator<Influencer> {
public ElasticsearchBatchedInfluencersIterator(Client client, String jobId,
ParseFieldMatcher parserFieldMatcher) {
@@ -28,7 +28,7 @@ class ElasticsearchBatchedInfluencersIterator extends ElasticsearchBatchedResult
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parser influencer", e);
}

View File

@@ -9,14 +9,14 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.prelert.job.ModelSnapshot;
import java.io.IOException;
import org.elasticsearch.xpack.prelert.job.ModelSnapshot;
class ElasticsearchBatchedModelSnapshotIterator extends ElasticsearchBatchedDocumentsIterator<ModelSnapshot> {
public ElasticsearchBatchedModelSnapshotIterator(Client client, String jobId, ParseFieldMatcher parserFieldMatcher) {
super(client, AnomalyDetectorsIndex.jobStateIndexName(), parserFieldMatcher);
@@ -32,7 +32,7 @@ class ElasticsearchBatchedModelSnapshotIterator extends ElasticsearchBatchedDocu
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parser model snapshot", e);
}

View File

@@ -16,8 +16,6 @@ import org.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsRequest;
import org.elasticsearch.action.admin.indices.exists.indices.IndicesExistsResponse;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.search.SearchRequestBuilder;
import org.elasticsearch.action.search.SearchResponse;
@@ -28,6 +26,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -342,7 +341,7 @@ public class JobProvider {
BytesReference source = response.getSourceAsBytesRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
return DataCounts.PARSER.apply(parser, () -> parseFieldMatcher);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse bucket", e);
@@ -443,7 +442,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse bucket", e);
}
@@ -497,7 +496,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse bucket", e);
}
@@ -569,7 +568,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse PerPartitionMaxProbabilities", e);
}
@@ -696,7 +695,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse category definition", e);
}
@@ -732,7 +731,7 @@ public class JobProvider {
BytesReference source = response.getSourceAsBytesRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse category definition", e);
}
@@ -815,7 +814,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse records", e);
}
@@ -881,7 +880,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse influencer", e);
}
@@ -1030,7 +1029,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse modelSnapshot", e);
}
@@ -1109,7 +1108,7 @@ public class JobProvider {
BytesReference source = response.getSourceAsBytesRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse quantiles", e);
}
@@ -1144,7 +1143,7 @@ public class JobProvider {
BytesReference source = hit.getSourceRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse modelDebugOutput", e);
}
@@ -1175,7 +1174,7 @@ public class JobProvider {
BytesReference source = modelSizeStatsResponse.getSourceAsBytesRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse model size stats", e);
}
@@ -1202,7 +1201,7 @@ public class JobProvider {
BytesReference source = response.getSourceAsBytesRef();
XContentParser parser;
try {
parser = XContentFactory.xContent(source).createParser(source);
parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
} catch (IOException e) {
throw new ElasticsearchParseException("failed to parse list", e);
}

View File

@@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.ParseFieldMatcherSupplier;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
@@ -40,7 +41,7 @@ public class AutodetectResultsParser extends AbstractComponent {
public Stream<AutodetectResult> parseResults(InputStream in) throws ElasticsearchParseException {
try {
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(in);
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, in);
XContentParser.Token token = parser.nextToken();
// if start of an array ignore it, we expect an array of buckets
if (token != XContentParser.Token.START_ARRAY) {

View File

@@ -11,6 +11,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.bytes.CompositeBytesReference;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContent;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@@ -79,7 +80,7 @@ public class NormalizerResultHandler extends AbstractComponent {
}
private void parseResult(XContent xContent, BytesReference bytesRef) throws IOException {
XContentParser parser = xContent.createParser(bytesRef);
XContentParser parser = xContent.createParser(NamedXContentRegistry.EMPTY, bytesRef);
NormalizerResult result = NormalizerResult.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
normalizedResults.add(result);
}

View File

@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.prelert.rest.job;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -32,7 +31,7 @@ public class RestPutJobAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
XContentParser parser = XContentFactory.xContent(restRequest.content()).createParser(restRequest.content());
XContentParser parser = restRequest.contentParser();
PutJobAction.Request putJobRequest = PutJobAction.Request.parseRequest(parser, () -> parseFieldMatcher);
boolean overwrite = restRequest.paramAsBoolean("overwrite", false);
putJobRequest.setOverwrite(overwrite);

View File

@@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.list;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -33,8 +31,7 @@ public class RestPutListAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentOrSourceParamParser();
PutListAction.Request putListRequest = PutListAction.Request.parseRequest(parser, () -> parseFieldMatcher);
return channel -> transportCreateListAction.execute(putListRequest, new AcknowledgedRestListener<>(channel));
}

View File

@@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.modelsnapshots;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -52,8 +50,7 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler {
String jobId = restRequest.param(Job.ID.getPreferredName());
Request getModelSnapshots;
if (restRequest.hasContentOrSourceParam()) {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentOrSourceParamParser();
getModelSnapshots = Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
} else {
getModelSnapshots = new Request(jobId);

View File

@@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.modelsnapshots;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -40,8 +38,7 @@ public class RestPutModelSnapshotDescriptionAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentParser();
PutModelSnapshotDescriptionAction.Request getModelSnapshots = PutModelSnapshotDescriptionAction.Request.parseRequest(
restRequest.param(Job.ID.getPreferredName()),
restRequest.param(ModelSnapshot.SNAPSHOT_ID.getPreferredName()),

View File

@@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.modelsnapshots;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -45,8 +43,7 @@ public class RestRevertModelSnapshotAction extends BaseRestHandler {
String jobId = restRequest.param(Job.ID.getPreferredName());
RevertModelSnapshotAction.Request request;
if (restRequest.hasContentOrSourceParam()) {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentOrSourceParamParser();
request = RevertModelSnapshotAction.Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
} else {
request = new RevertModelSnapshotAction.Request(jobId);

View File

@@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.results;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -47,10 +45,9 @@ public class RestGetBucketsAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String jobId = restRequest.param(Job.ID.getPreferredName());
BytesReference bodyBytes = restRequest.content();
final GetBucketsAction.Request request;
if (bodyBytes != null && bodyBytes.length() > 0) {
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
if (restRequest.hasContent()) {
XContentParser parser = restRequest.contentParser();
request = GetBucketsAction.Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
} else {
request = new GetBucketsAction.Request(jobId);

View File

@@ -10,7 +10,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -54,7 +53,7 @@ public class RestGetCategoriesAction extends BaseRestHandler {
BytesReference bodyBytes = restRequest.content();
if (bodyBytes != null && bodyBytes.length() > 0) {
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentParser();
request = GetCategoriesDefinitionAction.Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
request.setCategoryId(categoryId);
} else {

View File

@@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.results;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -43,10 +41,9 @@ public class RestGetInfluencersAction extends BaseRestHandler {
String jobId = restRequest.param(Job.ID.getPreferredName());
String start = restRequest.param(GetInfluencersAction.Request.START.getPreferredName());
String end = restRequest.param(GetInfluencersAction.Request.END.getPreferredName());
BytesReference bodyBytes = restRequest.content();
final GetInfluencersAction.Request request;
if (bodyBytes != null && bodyBytes.length() > 0) {
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
if (restRequest.hasContent()) {
XContentParser parser = restRequest.contentParser();
request = GetInfluencersAction.Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
} else {
request = new GetInfluencersAction.Request(jobId);

View File

@@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.results;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -41,9 +39,8 @@ public class RestGetRecordsAction extends BaseRestHandler {
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String jobId = restRequest.param(Job.ID.getPreferredName());
final GetRecordsAction.Request request;
BytesReference bodyBytes = restRequest.content();
if (bodyBytes != null && bodyBytes.length() > 0) {
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
if (restRequest.hasContent()) {
XContentParser parser = restRequest.contentParser();
request = GetRecordsAction.Request.parseRequest(jobId, parser, () -> parseFieldMatcher);
}
else {

View File

@@ -8,7 +8,6 @@ package org.elasticsearch.xpack.prelert.rest.schedulers;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -36,7 +35,7 @@ public class RestPutSchedulerAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName());
XContentParser parser = XContentFactory.xContent(restRequest.content()).createParser(restRequest.content());
XContentParser parser = restRequest.contentParser();
PutSchedulerAction.Request putSchedulerRequest = PutSchedulerAction.Request.parseRequest(schedulerId, parser,
() -> parseFieldMatcher);
return channel -> transportPutSchedulerAction.execute(putSchedulerRequest, new RestToXContentListener<>(channel));

View File

@ -8,12 +8,10 @@ package org.elasticsearch.xpack.prelert.rest.schedulers;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.mapper.DateFieldMapper;
import org.elasticsearch.rest.BaseRestHandler;
@ -65,8 +63,7 @@ public class RestStartSchedulerAction extends BaseRestHandler {
StartSchedulerAction.Request jobSchedulerRequest;
if (restRequest.hasContentOrSourceParam()) {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentOrSourceParamParser();
jobSchedulerRequest = StartSchedulerAction.Request.parseRequest(schedulerId, parser, () -> parseFieldMatcher);
} else {
long startTimeMillis = parseDateOrThrow(restRequest.param(StartSchedulerAction.START_TIME.getPreferredName(),

View File

@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.validate;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@ -34,8 +32,7 @@ public class RestValidateDetectorAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentOrSourceParamParser();
ValidateDetectorAction.Request validateDetectorRequest = ValidateDetectorAction.Request.parseRequest(parser,
() -> parseFieldMatcher);
return channel -> transportValidateAction.execute(validateDetectorRequest,

View File

@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.validate;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@ -34,8 +32,7 @@ public class RestValidateTransformAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentOrSourceParamParser();
ValidateTransformAction.Request validateDetectorRequest = ValidateTransformAction.Request.parseRequest(parser,
() -> parseFieldMatcher);
return channel -> transportValidateAction.execute(validateDetectorRequest,

View File

@ -6,10 +6,8 @@
package org.elasticsearch.xpack.prelert.rest.validate;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@ -34,8 +32,7 @@ public class RestValidateTransformsAction extends BaseRestHandler {
@Override
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
BytesReference bodyBytes = restRequest.contentOrSourceParam();
XContentParser parser = XContentFactory.xContent(bodyBytes).createParser(bodyBytes);
XContentParser parser = restRequest.contentOrSourceParamParser();
ValidateTransformsAction.Request validateDetectorRequest = ValidateTransformsAction.Request.PARSER.apply(parser,
() -> parseFieldMatcher);
return channel -> transportValidateAction.execute(validateDetectorRequest,

View File

@ -14,6 +14,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -259,7 +260,7 @@ public class SchedulerConfig extends ToXContentToBytes implements Writeable {
private XContentParser createParser(ParseField parseField, BytesReference bytesReference) {
try {
return XContentFactory.xContent(query).createParser(query);
return XContentFactory.xContent(query).createParser(NamedXContentRegistry.EMPTY, query);
} catch (IOException e) {
throw ExceptionsHelper.parseException(parseField, e);
}

View File

@ -6,18 +6,18 @@
package org.elasticsearch.xpack.prelert.utils;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateObserver;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.prelert.scheduler.SchedulerStatus;
import org.elasticsearch.xpack.prelert.job.metadata.PrelertMetadata;
import org.elasticsearch.xpack.prelert.scheduler.Scheduler;
import org.elasticsearch.xpack.prelert.scheduler.SchedulerStatus;
import java.util.function.Consumer;
import java.util.function.Predicate;
public class SchedulerStatusObserver {
@ -57,7 +57,7 @@ public class SchedulerStatusObserver {
}, new SchedulerStoppedPredicate(schedulerId, expectedStatus), waitTimeout);
}
private static class SchedulerStoppedPredicate implements ClusterStateObserver.ChangePredicate {
private static class SchedulerStoppedPredicate implements Predicate<ClusterState> {
private final String schedulerId;
private final SchedulerStatus expectedStatus;
@ -68,17 +68,7 @@ public class SchedulerStatusObserver {
}
@Override
public boolean apply(ClusterState previousState, ClusterState.ClusterStateStatus previousStatus, ClusterState newState,
ClusterState.ClusterStateStatus newStatus) {
return apply(newState);
}
@Override
public boolean apply(ClusterChangedEvent changedEvent) {
return apply(changedEvent.state());
}
boolean apply(ClusterState newState) {
public boolean test(ClusterState newState) {
PrelertMetadata metadata = newState.getMetaData().custom(PrelertMetadata.TYPE);
if (metadata != null) {
Scheduler scheduler = metadata.getScheduler(schedulerId);
@ -88,6 +78,7 @@ public class SchedulerStatusObserver {
}
return false;
}
}
}

View File

@ -13,6 +13,7 @@ import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.plugins.Plugin;
@ -197,7 +198,7 @@ public class ScheduledJobsIT extends ESIntegTestCase {
return new DataCounts(jobId);
}
try (XContentParser parser = XContentHelper.createParser(getResponse.getSourceAsBytesRef())) {
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, getResponse.getSourceAsBytesRef())) {
return DataCounts.PARSER.apply(parser, () -> ParseFieldMatcher.EMPTY);
} catch (IOException e) {
throw new RuntimeException(e);

View File

@ -9,6 +9,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.xpack.prelert.job.DataDescription.DataFormat;
@ -173,7 +174,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
public void testInvalidDataFormat() throws Exception {
BytesArray json = new BytesArray("{ \"format\":\"INEXISTENT_FORMAT\" }");
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
ParsingException ex = expectThrows(ParsingException.class,
() -> DataDescription.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [format]"));
@ -186,7 +187,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
public void testInvalidFieldDelimiter() throws Exception {
BytesArray json = new BytesArray("{ \"field_delimiter\":\",,\" }");
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
ParsingException ex = expectThrows(ParsingException.class,
() -> DataDescription.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [field_delimiter]"));
@ -199,7 +200,7 @@ public class DataDescriptionTests extends AbstractSerializingTestCase<DataDescri
public void testInvalidQuoteCharacter() throws Exception {
BytesArray json = new BytesArray("{ \"quote_character\":\"''\" }");
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
ParsingException ex = expectThrows(ParsingException.class,
() -> DataDescription.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("[data_description] failed to parse field [quote_character]"));

View File

@ -5,15 +5,12 @@
*/
package org.elasticsearch.xpack.prelert.job.audit;
import static org.mockito.Mockito.when;
import java.io.IOException;
import org.elasticsearch.action.ListenableActionFuture;
import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@ -22,6 +19,10 @@ import org.junit.Before;
import org.mockito.ArgumentCaptor;
import org.mockito.Mockito;
import java.io.IOException;
import static org.mockito.Mockito.when;
public class AuditorTests extends ESTestCase {
private Client client;
private ListenableActionFuture<IndexResponse> indexResponse;
@ -113,7 +114,7 @@ public class AuditorTests extends ESTestCase {
private AuditMessage parseAuditMessage() {
try {
String json = jsonCaptor.getValue().string();
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
return AuditMessage.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
} catch (IOException e) {
return new AuditMessage();
@ -123,7 +124,7 @@ public class AuditorTests extends ESTestCase {
private AuditActivity parseAuditActivity() {
try {
String json = jsonCaptor.getValue().string();
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
return AuditActivity.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
} catch (IOException e) {
return new AuditActivity();

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.prelert.job.condition;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.Writeable.Reader;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.ESTestCase;
@ -69,7 +70,7 @@ public class ConditionTests extends AbstractSerializingTestCase<Condition> {
public void testInvalidTransformName() throws Exception {
BytesArray json = new BytesArray("{ \"value\":\"someValue\" }");
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
() -> Condition.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("Required [operator]"));

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.prelert.job.process.autodetect;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
@ -21,7 +22,8 @@ public class BlackHoleAutodetectProcessTests extends ESTestCase {
String flushId = process.flushJob(InterimResultsParams.builder().build());
XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(process.getProcessOutStream());
XContentParser parser = XContentFactory.xContent(XContentType.JSON)
.createParser(NamedXContentRegistry.EMPTY, process.getProcessOutStream());
parser.nextToken(); // FlushAcknowledgementParser expects this to be
// called first
AutodetectResult result = AutodetectResult.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);

View File

@ -9,6 +9,7 @@ import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.ESTestCase;
@ -183,7 +184,7 @@ public class TransformConfigTests extends AbstractSerializingTestCase<TransformC
public void testInvalidTransformName() throws Exception {
BytesArray json = new BytesArray("{ \"transform\":\"\" }");
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
ParsingException ex = expectThrows(ParsingException.class,
() -> TransformConfig.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT));
assertThat(ex.getMessage(), containsString("[transform] failed to parse field [transform]"));

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.prelert.job.transform;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.test.ESTestCase;
@ -18,7 +19,7 @@ public class TransformSerialisationTests extends ESTestCase {
public void testDeserialise_singleFieldAsArray() throws JsonProcessingException, IOException {
String json = "{\"inputs\":\"dns\", \"transform\":\"domain_split\"}";
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
TransformConfig tr = TransformConfig.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
assertEquals(1, tr.getInputs().size());
@ -30,7 +31,7 @@ public class TransformSerialisationTests extends ESTestCase {
json = "{\"inputs\":\"dns\", \"transform\":\"domain_split\", \"outputs\":\"catted\"}";
parser = XContentFactory.xContent(json).createParser(json);
parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
tr = TransformConfig.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
assertEquals(1, tr.getInputs().size());
@ -44,7 +45,7 @@ public class TransformSerialisationTests extends ESTestCase {
public void testDeserialise_fieldsArray() throws JsonProcessingException, IOException {
String json = "{\"inputs\":[\"dns\"], \"transform\":\"domain_split\"}";
XContentParser parser = XContentFactory.xContent(json).createParser(json);
XContentParser parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
TransformConfig tr = TransformConfig.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
assertEquals(1, tr.getInputs().size());
@ -52,7 +53,7 @@ public class TransformSerialisationTests extends ESTestCase {
assertEquals("domain_split", tr.getTransform());
json = "{\"inputs\":[\"a\", \"b\", \"c\"], \"transform\":\"concat\", \"outputs\":[\"catted\"]}";
parser = XContentFactory.xContent(json).createParser(json);
parser = XContentFactory.xContent(json).createParser(NamedXContentRegistry.EMPTY, json);
tr = TransformConfig.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
assertEquals(3, tr.getInputs().size());

View File

@ -12,6 +12,7 @@ import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.search.SearchRequestParsers;
@ -20,9 +21,9 @@ import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.prelert.job.Job;
import org.elasticsearch.xpack.prelert.job.JobStatus;
import org.elasticsearch.xpack.prelert.scheduler.SchedulerConfig;
import org.elasticsearch.xpack.prelert.job.metadata.PrelertMetadata;
import org.elasticsearch.xpack.prelert.scheduler.ScheduledJobRunnerTests;
import org.elasticsearch.xpack.prelert.scheduler.SchedulerConfig;
import java.util.HashMap;
import java.util.Map;
@ -51,7 +52,7 @@ public class RestStartJobSchedulerActionTests extends ESTestCase {
Map<String, String> params = new HashMap<>();
params.put("start", "not-a-date");
params.put("scheduler_id", "foo-scheduler");
RestRequest restRequest1 = new FakeRestRequest.Builder().withParams(params).build();
RestRequest restRequest1 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build();
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> action.prepareRequest(restRequest1, mock(NodeClient.class)));
assertEquals("Query param 'start' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).",
@ -60,7 +61,7 @@ public class RestStartJobSchedulerActionTests extends ESTestCase {
params = new HashMap<>();
params.put("end", "not-a-date");
params.put("scheduler_id", "foo-scheduler");
RestRequest restRequest2 = new FakeRestRequest.Builder().withParams(params).build();
RestRequest restRequest2 = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withParams(params).build();
e = expectThrows(ElasticsearchParseException.class, () -> action.prepareRequest(restRequest2, mock(NodeClient.class)));
assertEquals("Query param 'end' with value 'not-a-date' cannot be parsed as a date or converted to a number (epoch).",
e.getMessage());

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.prelert.scheduler;
import com.carrotsearch.randomizedtesting.generators.CodepointSetGenerator;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
@ -93,7 +94,7 @@ public class SchedulerConfigTests extends AbstractSerializingTestCase<SchedulerC
XContentBuilder xContentBuilder = toXContent(testInstance, randomFrom(XContentType.values()));
XContentBuilder shuffled = shuffleXContent(xContentBuilder, shuffleProtectedFields());
XContentParser parser = XContentFactory.xContent(shuffled.bytes()).createParser(shuffled.bytes());
XContentParser parser = XContentFactory.xContent(shuffled.bytes()).createParser(NamedXContentRegistry.EMPTY, shuffled.bytes());
SchedulerConfig parsedInstance = parseInstance(parser, ParseFieldMatcher.STRICT);
assertEquals(testInstance.getQueryAsMap(), parsedInstance.getQueryAsMap());

View File

@ -5,21 +5,22 @@
*/
package org.elasticsearch.xpack.prelert.support;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
public abstract class AbstractSerializingTestCase<T extends ToXContent & Writeable> extends AbstractWireSerializingTestCase<T> {
/**
@ -41,7 +42,7 @@ public abstract class AbstractSerializingTestCase<T extends ToXContent & Writeab
private void assertParsedInstance(BytesReference queryAsBytes, T expectedInstance)
throws IOException {
XContentParser parser = XContentFactory.xContent(queryAsBytes).createParser(queryAsBytes);
XContentParser parser = XContentFactory.xContent(queryAsBytes).createParser(NamedXContentRegistry.EMPTY, queryAsBytes);
T newInstance = parseQuery(parser, ParseFieldMatcher.STRICT);
assertNotSame(newInstance, expectedInstance);
assertEquals(expectedInstance, newInstance);

View File

@ -5,21 +5,22 @@
*/
package org.elasticsearch.xpack.prelert.support;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.Streamable;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.Collections;
import java.util.Map;
public abstract class AbstractStreamableXContentTestCase<T extends ToXContent & Streamable> extends AbstractStreamableTestCase<T> {
/**
@ -41,7 +42,7 @@ public abstract class AbstractStreamableXContentTestCase<T extends ToXContent &
private void assertParsedInstance(BytesReference queryAsBytes, T expectedInstance)
throws IOException {
XContentParser parser = XContentFactory.xContent(queryAsBytes).createParser(queryAsBytes);
XContentParser parser = XContentFactory.xContent(queryAsBytes).createParser(NamedXContentRegistry.EMPTY, queryAsBytes);
T newInstance = parseQuery(parser, ParseFieldMatcher.STRICT);
assertNotSame(newInstance, expectedInstance);
assertEquals(expectedInstance, newInstance);