Update to pass content type from the REST APIs to transport requests (elastic/elasticsearch#4689)

This change adapts x-pack to pass the parsed XContentType from REST requests on to transport
requests and to use that value instead of attempting to auto-detect the content type.
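
In broad strokes the pattern looks like the sketch below: a REST handler forwards RestRequest#getXContentType() (derived from the Content-Type header) together with the raw body, and the parsing side builds a parser from that explicit type instead of sniffing the bytes with XContentFactory. This is a simplified illustration assembled from the RestPutLicenseAction and License hunks further down, not code from the commit; KnownTypeParsing is a hypothetical stand-in class.

import java.io.IOException;

import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;

final class KnownTypeParsing {

    // Before this change: XContentFactory.xContent(bytes).createParser(...) guessed the format.
    // After: the caller supplies the XContentType it already parsed from the request.
    static XContentParser createParser(BytesReference bytes, XContentType xContentType) throws IOException {
        // EMPTY is fine here because this sketch never resolves named objects.
        return xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, bytes);
    }
}

On the REST side the same idea shows up as calls like putLicenseRequest.license(request.content(), request.getXContentType()), so the content type parsed from the HTTP request travels with the body all the way into the transport action.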

Original commit: elastic/x-pack-elasticsearch@57475fd403
Jay Modi 2017-02-02 14:08:34 -05:00 committed by GitHub
parent 8cab4fec4b
commit 1f32ef21a2
103 changed files with 668 additions and 303 deletions

View File

@ -8,6 +8,7 @@ package org.elasticsearch.license;
import org.apache.lucene.util.CollectionUtil;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -16,6 +17,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.nio.ByteBuffer;
@ -487,14 +489,9 @@ public class License implements ToXContentObject {
}
}
public static License fromSource(String content) throws IOException {
return fromSource(content.getBytes(StandardCharsets.UTF_8));
}
public static License fromSource(byte[] bytes) throws IOException {
public static License fromSource(BytesReference bytes, XContentType xContentType) throws IOException {
// EMPTY is safe here because we don't call namedObject
final XContentParser parser = XContentFactory.xContent(bytes).createParser(NamedXContentRegistry.EMPTY, bytes);
final XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, bytes);
License license = null;
if (parser.nextToken() == XContentParser.Token.START_OBJECT) {
if (parser.nextToken() == XContentParser.Token.FIELD_NAME) {

View File

@ -8,8 +8,10 @@ package org.elasticsearch.license;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
@ -31,10 +33,11 @@ public class PutLicenseRequest extends AcknowledgedRequest<PutLicenseRequest> {
* Parses license from json format to an instance of {@link License}
*
* @param licenseDefinition licenses definition
* @param xContentType the content type of the license
*/
public PutLicenseRequest license(String licenseDefinition) {
public PutLicenseRequest license(BytesReference licenseDefinition, XContentType xContentType) {
try {
return license(License.fromSource(licenseDefinition));
return license(License.fromSource(licenseDefinition, xContentType));
} catch (IOException e) {
throw new IllegalArgumentException("failed to parse license source", e);
}

View File

@ -7,6 +7,8 @@ package org.elasticsearch.license;
import org.elasticsearch.action.support.master.AcknowledgedRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
/**
* Register license request builder
@ -37,8 +39,8 @@ public class PutLicenseRequestBuilder extends AcknowledgedRequestBuilder<PutLice
return this;
}
public PutLicenseRequestBuilder setLicense(String licenseSource) {
request.license(licenseSource);
public PutLicenseRequestBuilder setLicense(BytesReference licenseSource, XContentType xContentType) {
request.license(licenseSource, xContentType);
return this;
}

View File

@ -45,7 +45,7 @@ public class RestPutLicenseAction extends XPackRestHandler {
@Override
public RestChannelConsumer doPrepareRequest(final RestRequest request, final XPackClient client) throws IOException {
PutLicenseRequest putLicenseRequest = new PutLicenseRequest();
putLicenseRequest.license(request.content().utf8ToString());
putLicenseRequest.license(request.content(), request.getXContentType());
putLicenseRequest.acknowledge(request.paramAsBoolean("acknowledge", false));
return channel -> client.es().admin().cluster().execute(PutLicenseAction.INSTANCE, putLicenseRequest,
new RestBuilderListener<PutLicenseResponse>(channel) {

View File

@ -35,6 +35,8 @@ import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.common.http.auth.ApplicableHttpAuth;
import org.elasticsearch.xpack.common.http.auth.HttpAuthRegistry;
import org.elasticsearch.xpack.common.socket.SocketAccess;
@ -115,6 +117,14 @@ public class HttpClient extends AbstractComponent {
}
}
// BWC - hack for input requests made to elasticsearch that do not provide the right content-type header!
if (request.hasBody() && internalRequest.containsHeader("Content-Type") == false) {
XContentType xContentType = XContentFactory.xContentType(request.body());
if (xContentType != null) {
internalRequest.setHeader("Content-Type", xContentType.mediaType());
}
}
// proxy
if (request.proxy != null && request.proxy.equals(HttpProxy.NO_PROXY) == false) {
HttpHost proxy = new HttpHost(request.proxy.getHost(), request.proxy.getPort(), request.scheme.scheme());

View File

@ -5,9 +5,13 @@
*/
package org.elasticsearch.xpack.monitoring.action;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc;
import java.io.IOException;
@ -18,6 +22,7 @@ public class MonitoringBulkDoc extends MonitoringDoc {
private String type;
private String id;
private BytesReference source;
private XContentType xContentType;
public MonitoringBulkDoc(String monitoringId, String monitoringVersion) {
super(monitoringId, monitoringVersion);
@ -25,12 +30,13 @@ public class MonitoringBulkDoc extends MonitoringDoc {
public MonitoringBulkDoc(String monitoringId, String monitoringVersion,
MonitoringIndex index, String type, String id,
BytesReference source) {
BytesReference source, XContentType xContentType) {
super(monitoringId, monitoringVersion);
this.index = index;
this.type = type;
this.id = id;
this.source = source;
this.xContentType = xContentType;
}
/**
@ -42,6 +48,11 @@ public class MonitoringBulkDoc extends MonitoringDoc {
type = in.readOptionalString();
id = in.readOptionalString();
source = in.readBytesReference();
if (source != BytesArray.EMPTY && in.getVersion().after(Version.V_5_3_0_UNRELEASED)) { // TODO update to onOrAfter after backporting
xContentType = XContentType.readFrom(in);
} else {
xContentType = XContentFactory.xContentType(source);
}
}
@Override
@ -51,6 +62,10 @@ public class MonitoringBulkDoc extends MonitoringDoc {
out.writeOptionalString(type);
out.writeOptionalString(id);
out.writeBytesReference(source);
if (source != null && source != BytesArray.EMPTY && out.getVersion().after(Version.V_5_3_0_UNRELEASED)) { // TODO update to
// onOrAfter after backporting
xContentType.writeTo(out);
}
}
public MonitoringIndex getIndex() {
@ -81,8 +96,12 @@ public class MonitoringBulkDoc extends MonitoringDoc {
return source;
}
public void setSource(BytesReference source) {
public void setSource(BytesReference source, XContentType xContentType) {
this.source = source;
this.xContentType = xContentType;
}
public XContentType getXContentType() {
return xContentType;
}
}
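
The serialization change in MonitoringBulkDoc above is the backwards-compatibility pattern that recurs in every transport request touched by this commit (ExecuteWatchRequest and PutWatchRequest below follow the same shape): the XContentType is only written to the stream when the remote node is newer than 5.3.0, and readers fall back to auto-detection for older peers. A condensed sketch with illustrative class and field names, not taken verbatim from any one file:

import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

class VersionedSourceOverWire {
    BytesReference source;
    XContentType xContentType;

    void readFrom(StreamInput in) throws IOException {
        source = in.readBytesReference();
        if (in.getVersion().after(Version.V_5_3_0_UNRELEASED)) { // TODO onOrAfter once backported
            // a newer peer sent the content type explicitly
            xContentType = XContentType.readFrom(in);
        } else {
            // an older peer did not, so fall back to sniffing the bytes
            xContentType = XContentFactory.xContentType(source);
        }
    }

    void writeTo(StreamOutput out) throws IOException {
        out.writeBytesReference(source);
        if (out.getVersion().after(Version.V_5_3_0_UNRELEASED)) {
            // only peers that can read the extra enum get it
            xContentType.writeTo(out);
        }
    }
}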

View File

@ -15,6 +15,7 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
import java.util.ArrayList;
@ -86,10 +87,10 @@ public class MonitoringBulkRequest extends ActionRequest {
* Parses a monitoring bulk request and builds the list of documents to be indexed.
*/
public MonitoringBulkRequest add(BytesReference content, String defaultMonitoringId, String defaultMonitoringApiVersion,
String defaultType) throws IOException {
String defaultType, XContentType xContentType) throws IOException {
// MonitoringBulkRequest accepts a body request that has the same format as the BulkRequest:
// instead of duplicating the parsing logic here we use a new BulkRequest instance to parse the content.
BulkRequest bulkRequest = Requests.bulkRequest().add(content, null, defaultType);
BulkRequest bulkRequest = Requests.bulkRequest().add(content, null, defaultType, xContentType);
for (DocWriteRequest request : bulkRequest.requests()) {
if (request instanceof IndexRequest) {
@ -102,7 +103,8 @@ public class MonitoringBulkRequest extends ActionRequest {
MonitoringIndex.from(indexRequest.index()),
indexRequest.type(),
indexRequest.id(),
indexRequest.source());
indexRequest.source(),
xContentType);
add(doc);
} else {

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.monitoring.action;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException;
@ -23,9 +24,9 @@ public class MonitoringBulkRequestBuilder
return this;
}
public MonitoringBulkRequestBuilder add(BytesReference content, String defaultId, String defaultApiVersion, String defaultType)
throws IOException {
request.add(content, defaultId, defaultApiVersion, defaultType);
public MonitoringBulkRequestBuilder add(BytesReference content, String defaultId, String defaultApiVersion, String defaultType,
XContentType xContentType) throws IOException {
request.add(content, defaultId, defaultApiVersion, defaultType, xContentType);
return this;
}

View File

@ -60,7 +60,7 @@ public class LocalBulk extends ExportBulk {
try {
MonitoringIndexNameResolver<MonitoringDoc> resolver = resolvers.getResolver(doc);
IndexRequest request = new IndexRequest(resolver.index(doc), resolver.type(doc), resolver.id(doc));
request.source(resolver.source(doc, XContentType.SMILE));
request.source(resolver.source(doc, XContentType.SMILE), XContentType.SMILE);
// allow the use of ingest pipelines to be completely optional
if (usePipeline) {

View File

@ -364,7 +364,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
putMapping.type(type);
// avoid mapping at all; we use this index as a data cache rather than for search
putMapping.source("{\"enabled\":false}");
putMapping.source("{\"enabled\":false}", XContentType.JSON);
client.admin().indices().putMapping(putMapping, listener);
}
@ -404,7 +404,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
logger.debug("installing ingest pipeline [{}]", EXPORT_PIPELINE_NAME);
final BytesReference emptyPipeline = emptyPipeline(XContentType.JSON).bytes();
final PutPipelineRequest request = new PutPipelineRequest(EXPORT_PIPELINE_NAME, emptyPipeline);
final PutPipelineRequest request = new PutPipelineRequest(EXPORT_PIPELINE_NAME, emptyPipeline, XContentType.JSON);
client.admin().cluster().putPipeline(request, listener);
}
@ -440,7 +440,7 @@ public class LocalExporter extends Exporter implements ClusterStateListener, Cle
private void putTemplate(String template, String source, ActionListener<PutIndexTemplateResponse> listener) {
logger.debug("installing template [{}]",template);
PutIndexTemplateRequest request = new PutIndexTemplateRequest(template).source(source);
PutIndexTemplateRequest request = new PutIndexTemplateRequest(template).source(source, XContentType.JSON);
assert !Thread.currentThread().isInterrupted() : "current thread has been interrupted before putting index template!!!";
// async call, so we won't block cluster event thread

View File

@ -29,7 +29,7 @@ public class MonitoringBulkDataResolver extends MonitoringIndexNameResolver.Data
protected void buildXContent(MonitoringBulkDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
BytesReference source = document.getSource();
if (source != null && source.length() > 0) {
builder.rawField(type(document), source);
builder.rawField(type(document), source, document.getXContentType());
}
}
}

View File

@ -30,7 +30,7 @@ public class MonitoringBulkTimestampedResolver extends MonitoringIndexNameResolv
protected void buildXContent(MonitoringBulkDoc document, XContentBuilder builder, ToXContent.Params params) throws IOException {
BytesReference source = document.getSource();
if (source != null && source.length() > 0) {
builder.rawField(type(document), source);
builder.rawField(type(document), source, document.getXContentType());
}
}
}

View File

@ -61,7 +61,7 @@ public class RestMonitoringBulkAction extends MonitoringRestHandler {
}
MonitoringBulkRequestBuilder requestBuilder = client.monitoring().prepareMonitoringBulk();
requestBuilder.add(request.content(), id, version, defaultType);
requestBuilder.add(request.content(), id, version, defaultType, request.getXContentType());
return channel -> requestBuilder.execute(new RestBuilderListener<MonitoringBulkResponse>(channel) {
@Override
public RestResponse buildResponse(MonitoringBulkResponse response, XContentBuilder builder) throws Exception {

View File

@ -27,5 +27,4 @@ public abstract class XPackRestHandler extends BaseRestHandler {
}
protected abstract RestChannelConsumer doPrepareRequest(RestRequest request, XPackClient client) throws IOException;
}

View File

@ -30,6 +30,7 @@ import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.index.mapper.MapperService;
@ -37,6 +38,7 @@ import org.elasticsearch.xpack.security.authc.esnative.NativeRealmMigrator;
import org.elasticsearch.xpack.template.TemplateUtils;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
@ -212,7 +214,9 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
, SECURITY_INDEX_TEMPLATE_VERSION_PATTERN);
PutIndexTemplateRequest putTemplateRequest = client.admin().indices()
.preparePutTemplate(SECURITY_TEMPLATE_NAME).setSource(template).request();
.preparePutTemplate(SECURITY_TEMPLATE_NAME)
.setSource(new BytesArray(template.getBytes(StandardCharsets.UTF_8)), XContentType.JSON)
.request();
client.admin().indices().putTemplate(putTemplateRequest, new ActionListener<PutIndexTemplateResponse>() {
@Override
public void onResponse(PutIndexTemplateResponse putIndexTemplateResponse) {
@ -287,8 +291,8 @@ public class SecurityTemplateService extends AbstractComponent implements Cluste
for (Object typeMapping : mappings.values().toArray()) {
CompressedXContent typeMappingXContent = (CompressedXContent) typeMapping;
try {
Map<String, Object> typeMappingMap = XContentHelper.convertToMap(new BytesArray(typeMappingXContent.uncompressed()), false)
.v2();
Map<String, Object> typeMappingMap =
XContentHelper.convertToMap(new BytesArray(typeMappingXContent.uncompressed()), false, XContentType.JSON).v2();
// should always contain one entry with key = typename
assert (typeMappingMap.size() == 1);
String key = typeMappingMap.keySet().iterator().next();

View File

@ -10,6 +10,8 @@ import org.elasticsearch.action.support.WriteRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
import java.io.IOException;
@ -29,10 +31,22 @@ public class PutRoleRequestBuilder extends ActionRequestBuilder<PutRoleRequest,
super(client, action, new PutRoleRequest());
}
/**
* Populate the put role request from the source and the role's name
* @deprecated use {@link #source(String, BytesReference, XContentType)} to avoid content type auto-detection
*/
@Deprecated
public PutRoleRequestBuilder source(String name, BytesReference source) throws IOException {
return source(name, source, XContentFactory.xContentType(source));
}
/**
* Populate the put role request from the source and the role's name
*/
public PutRoleRequestBuilder source(String name, BytesReference source, XContentType xContentType) throws IOException {
// we pass false as last parameter because we want to reject the request if field permissions
// are given in 2.x syntax
RoleDescriptor descriptor = RoleDescriptor.parse(name, source, false);
RoleDescriptor descriptor = RoleDescriptor.parse(name, source, false, xContentType);
assert name.equals(descriptor.getName());
request.name(name);
request.cluster(descriptor.getClusterPrivileges());

View File

@ -12,8 +12,9 @@ import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.support.Validation;
@ -57,9 +58,21 @@ public class ChangePasswordRequestBuilder
return this;
}
/**
* Populate the change password request from the source
* @deprecated use {@link #source(BytesReference, XContentType)} to avoid content type auto-detection
*/
@Deprecated
public ChangePasswordRequestBuilder source(BytesReference source) throws IOException {
return source(source, XContentFactory.xContentType(source));
}
/**
* Populate the change password request from the source in the provided content type
*/
public ChangePasswordRequestBuilder source(BytesReference source, XContentType xContentType) throws IOException {
// EMPTY is ok here because we never call namedObject
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, source)) {
try (XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, source)) {
XContentUtils.verifyObject(parser);
XContentParser.Token token;
String currentFieldName = null;

View File

@ -14,9 +14,10 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentParser.Token;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
import org.elasticsearch.xpack.security.support.Validation;
@ -26,6 +27,7 @@ import org.elasticsearch.xpack.common.xcontent.XContentUtils;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
public class PutUserRequestBuilder extends ActionRequestBuilder<PutUserRequest, PutUserResponse, PutUserRequestBuilder>
implements WriteRequestBuilder<PutUserRequestBuilder> {
@ -90,10 +92,23 @@ public class PutUserRequestBuilder extends ActionRequestBuilder<PutUserRequest,
return this;
}
/**
* Populate the put user request using the given source and username
* @deprecated use {@link #source(String, BytesReference, XContentType)} to avoid content type auto-detection
*/
@Deprecated
public PutUserRequestBuilder source(String username, BytesReference source) throws IOException {
return source(username, source, XContentFactory.xContentType(source));
}
/**
* Populate the put user request using the given source and username
*/
public PutUserRequestBuilder source(String username, BytesReference source, XContentType xContentType) throws IOException {
Objects.requireNonNull(xContentType);
username(username);
// EMPTY is ok here because we never call namedObject
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, source)) {
try (XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, source)) {
XContentUtils.verifyObject(parser);
XContentParser.Token token;
String currentFieldName = null;

View File

@ -20,7 +20,7 @@ public class AuditUtil {
public static String restRequestContent(RestRequest request) {
if (request.hasContent()) {
try {
return XContentHelper.convertToJson(request.content(), false, false);
return XContentHelper.convertToJson(request.content(), false, false, request.getXContentType());
} catch (IOException ioe) {
return "Invalid Format: " + request.content().utf8ToString();
}

View File

@ -39,6 +39,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.node.Node;
import org.elasticsearch.rest.RestRequest;
@ -846,7 +847,7 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
ByteArrayOutputStream out = new ByteArrayOutputStream();
Streams.copy(is, out);
final byte[] template = out.toByteArray();
final PutIndexTemplateRequest request = new PutIndexTemplateRequest(INDEX_TEMPLATE_NAME).source(template);
final PutIndexTemplateRequest request = new PutIndexTemplateRequest(INDEX_TEMPLATE_NAME).source(template, XContentType.JSON);
if (customSettings != null && customSettings.names().size() > 0) {
Settings updatedSettings = Settings.builder()
.put(request.settings())
@ -890,10 +891,10 @@ public class IndexAuditTrail extends AbstractComponent implements AuditTrail, Cl
}
private void putAuditIndexMappings(String index, PutIndexTemplateRequest request, ActionListener<Void> listener) {
String mappings = request.mappings().get(DOC_TYPE);
client.admin().indices().preparePutMapping(index)
.setType(DOC_TYPE)
.setSource(request.mappings()
.get(DOC_TYPE))
.setSource(mappings, XContentType.JSON)
.execute(ActionListener.wrap((response) -> {
if (response.isAcknowledged()) {
listener.onResponse(null);

View File

@ -32,6 +32,7 @@ import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.env.Environment;
import org.elasticsearch.xpack.common.socket.SocketAccess;
@ -158,6 +159,7 @@ public class ESNativeRealmMigrateTool extends MultiCommand {
conn.setRequestProperty("Authorization",
UsernamePasswordToken.basicAuthHeaderValue(username.value(options),
new SecuredString(password.value(options).toCharArray())));
conn.setRequestProperty("Content-Type", XContentType.JSON.mediaType());
conn.setDoOutput(true); // we'll be sending a body
SocketAccess.doPrivileged(conn::connect);
if (bodyString != null) {

View File

@ -24,6 +24,7 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest.RefreshPolicy;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
@ -34,6 +35,7 @@ import org.elasticsearch.common.ValidationException;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.engine.DocumentMissingException;
@ -231,7 +233,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
}
client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, docType, username)
.setDoc(Fields.PASSWORD.getPreferredName(), String.valueOf(request.passwordHash()))
.setDoc(Requests.INDEX_CONTENT_TYPE, Fields.PASSWORD.getPreferredName(), String.valueOf(request.passwordHash()))
.setRefreshPolicy(request.getRefreshPolicy())
.execute(new ActionListener<UpdateResponse>() {
@Override
@ -318,7 +320,8 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
assert putUserRequest.passwordHash() == null;
// We must have an existing document
client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, USER_DOC_TYPE, putUserRequest.username())
.setDoc(User.Fields.USERNAME.getPreferredName(), putUserRequest.username(),
.setDoc(Requests.INDEX_CONTENT_TYPE,
User.Fields.USERNAME.getPreferredName(), putUserRequest.username(),
User.Fields.ROLES.getPreferredName(), putUserRequest.roles(),
User.Fields.FULL_NAME.getPreferredName(), putUserRequest.fullName(),
User.Fields.EMAIL.getPreferredName(), putUserRequest.email(),
@ -403,7 +406,7 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
final ActionListener<Void> listener) {
try {
client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, USER_DOC_TYPE, username)
.setDoc(User.Fields.ENABLED.getPreferredName(), enabled)
.setDoc(Requests.INDEX_CONTENT_TYPE, User.Fields.ENABLED.getPreferredName(), enabled)
.setRefreshPolicy(refreshPolicy)
.execute(new ActionListener<UpdateResponse>() {
@Override
@ -445,8 +448,9 @@ public class NativeUsersStore extends AbstractComponent implements ClusterStateL
final ActionListener<Void> listener) {
try {
client.prepareUpdate(SecurityTemplateService.SECURITY_INDEX_NAME, RESERVED_USER_DOC_TYPE, username)
.setDoc(User.Fields.ENABLED.getPreferredName(), enabled)
.setUpsert(User.Fields.PASSWORD.getPreferredName(), String.valueOf(ReservedRealm.DEFAULT_PASSWORD_HASH),
.setDoc(Requests.INDEX_CONTENT_TYPE, User.Fields.ENABLED.getPreferredName(), enabled)
.setUpsert(XContentType.JSON,
User.Fields.PASSWORD.getPreferredName(), String.valueOf(ReservedRealm.DEFAULT_PASSWORD_HASH),
User.Fields.ENABLED.getPreferredName(), enabled)
.setRefreshPolicy(refreshPolicy)
.execute(new ActionListener<UpdateResponse>() {

View File

@ -22,6 +22,7 @@ import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.xpack.common.xcontent.XContentUtils;
import org.elasticsearch.xpack.security.support.MetadataUtils;
@ -199,10 +200,11 @@ public class RoleDescriptor implements ToXContentObject {
}
}
public static RoleDescriptor parse(String name, BytesReference source, boolean allow2xFormat) throws IOException {
public static RoleDescriptor parse(String name, BytesReference source, boolean allow2xFormat, XContentType xContentType)
throws IOException {
assert name != null;
// EMPTY is safe here because we never use namedObject
try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, source)) {
try (XContentParser parser = xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, source)) {
return parse(name, parser, allow2xFormat);
}
}

View File

@ -33,6 +33,7 @@ import org.elasticsearch.common.settings.Setting.Property;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.index.get.GetResult;
@ -457,7 +458,7 @@ public class NativeRolesStore extends AbstractComponent implements ClusterStateL
try {
// we pass true as last parameter because we do not want to reject permissions if the field permissions
// are given in 2.x syntax
RoleDescriptor roleDescriptor = RoleDescriptor.parse(name, sourceBytes, true);
RoleDescriptor roleDescriptor = RoleDescriptor.parse(name, sourceBytes, true, XContentType.JSON);
if (licenseState.isDocumentAndFieldLevelSecurityAllowed()) {
return roleDescriptor;
} else {

View File

@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheAction;
import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheRequest;
import org.elasticsearch.xpack.security.action.realm.ClearRealmCacheRequestBuilder;
@ -143,8 +144,8 @@ public class SecurityClient {
client.execute(DeleteUserAction.INSTANCE, request, listener);
}
public PutUserRequestBuilder preparePutUser(String username, BytesReference source) throws IOException {
return new PutUserRequestBuilder(client).source(username, source);
public PutUserRequestBuilder preparePutUser(String username, BytesReference source, XContentType xContentType) throws IOException {
return new PutUserRequestBuilder(client).source(username, source, xContentType);
}
public PutUserRequestBuilder preparePutUser(String username, char[] password, String... roles) {
@ -159,8 +160,9 @@ public class SecurityClient {
return new ChangePasswordRequestBuilder(client).username(username).password(password);
}
public ChangePasswordRequestBuilder prepareChangePassword(String username, BytesReference source) throws IOException {
return new ChangePasswordRequestBuilder(client).username(username).source(source);
public ChangePasswordRequestBuilder prepareChangePassword(String username, BytesReference source, XContentType xContentType)
throws IOException {
return new ChangePasswordRequestBuilder(client).username(username).source(source, xContentType);
}
public void changePassword(ChangePasswordRequest request, ActionListener<ChangePasswordResponse> listener) {
@ -197,8 +199,8 @@ public class SecurityClient {
return new PutRoleRequestBuilder(client).name(name);
}
public PutRoleRequestBuilder preparePutRole(String name, BytesReference source) throws IOException {
return new PutRoleRequestBuilder(client).source(name, source);
public PutRoleRequestBuilder preparePutRole(String name, BytesReference source, XContentType xContentType) throws IOException {
return new PutRoleRequestBuilder(client).source(name, source, xContentType);
}
public void putRole(PutRoleRequest request, ActionListener<PutRoleResponse> listener) {
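
With SecurityClient taking an explicit XContentType, callers have to state what format the bytes are in rather than relying on detection. A hypothetical call site for the new preparePutUser overload (the user name, JSON body, refresh value and the SecurityClient import path are assumptions, not taken from the commit):

import java.io.IOException;

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.client.SecurityClient;

class PutUserExample {
    static void putUser(SecurityClient securityClient) throws IOException {
        securityClient.preparePutUser(
                "jacknich", // illustrative user name
                new BytesArray("{\"password\": \"longerpassword\", \"roles\": [\"superuser\"]}"),
                XContentType.JSON)
            .setRefreshPolicy("true") // mirrors the refresh param handling in the REST handlers above
            .get();
    }
}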

View File

@ -17,7 +17,6 @@ import org.elasticsearch.rest.RestRequest;
import java.io.IOException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
@ -32,7 +31,7 @@ public interface RestRequestFilter {
default RestRequest getFilteredRequest(RestRequest restRequest) throws IOException {
Set<String> fields = getFilteredFields();
if (restRequest.hasContent() && fields.isEmpty() == false) {
return new RestRequest(restRequest.getXContentRegistry(), restRequest.params(), restRequest.path()) {
return new RestRequest(restRequest.getXContentRegistry(), restRequest.params(), restRequest.path(), restRequest.getHeaders()) {
private BytesReference filteredBytes = null;
@ -67,16 +66,6 @@ public interface RestRequestFilter {
}
return filteredBytes;
}
@Override
public String header(String name) {
return restRequest.header(name);
}
@Override
public Iterable<Entry<String, String>> headers() {
return restRequest.headers();
}
};
} else {
return restRequest;

View File

@ -46,8 +46,9 @@ public class RestPutRoleAction extends BaseRestHandler {
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
PutRoleRequestBuilder requestBuilder = new SecurityClient(client).preparePutRole(request.param("name"), request.content());
requestBuilder.setRefreshPolicy(request.param("refresh"));
PutRoleRequestBuilder requestBuilder = new SecurityClient(client)
.preparePutRole(request.param("name"), request.content(), request.getXContentType())
.setRefreshPolicy(request.param("refresh"));
return channel -> requestBuilder.execute(new RestBuilderListener<PutRoleResponse>(channel) {
@Override
public RestResponse buildResponse(PutRoleResponse putRoleResponse, XContentBuilder builder) throws Exception {

View File

@ -53,7 +53,8 @@ public class RestChangePasswordAction extends BaseRestHandler implements RestReq
final String refresh = request.param("refresh");
return channel ->
new SecurityClient(client).prepareChangePassword(username, request.content())
new SecurityClient(client)
.prepareChangePassword(username, request.content(), request.getXContentType())
.setRefreshPolicy(refresh)
.execute(new RestBuilderListener<ChangePasswordResponse>(channel) {
@Override

View File

@ -51,8 +51,10 @@ public class RestPutUserAction extends BaseRestHandler implements RestRequestFil
@Override
public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
PutUserRequestBuilder requestBuilder = new SecurityClient(client).preparePutUser(request.param("username"), request.content());
requestBuilder.setRefreshPolicy(request.param("refresh"));
PutUserRequestBuilder requestBuilder = new SecurityClient(client)
.preparePutUser(request.param("username"), request.content(), request.getXContentType())
.setRefreshPolicy(request.param("refresh"));
return channel -> requestBuilder.execute(new RestBuilderListener<PutUserResponse>(channel) {
@Override
public RestResponse buildResponse(PutUserResponse putUserResponse, XContentBuilder builder) throws Exception {

View File

@ -11,10 +11,12 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.NotXContentException;
import org.elasticsearch.common.io.Streams;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
/**
@ -59,7 +61,7 @@ public class TemplateUtils {
}
try {
XContentHelper.convertToMap(source, false).v2();
XContentHelper.convertToMap(source, false, XContentType.JSON).v2();
} catch (NotXContentException e) {
throw new ElasticsearchParseException("Template must not be empty");
} catch (Exception e) {

View File

@ -18,6 +18,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.sort.SortBuilders;
@ -136,7 +137,7 @@ public class WatcherService extends AbstractComponent {
for (SearchHit hit : response.getHits()) {
String id = hit.getId();
try {
Watch watch = parser.parse(id, true, hit.getSourceRef());
Watch watch = parser.parse(id, true, hit.getSourceRef(), XContentType.JSON);
watch.version(hit.version());
watches.add(watch);
} catch (Exception e) {

View File

@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.common.stats.Counters;
@ -199,7 +200,8 @@ public final class ExecutionService extends AbstractComponent {
logger.warn("unable to find watch [{}] in watch index, perhaps it has been deleted", event.jobName());
} else {
try {
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now);
Watch watch =
parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now, XContentType.JSON);
TriggeredExecutionContext ctx = new TriggeredExecutionContext(watch, now, event, defaultThrottlePeriod);
contexts.add(ctx);
triggeredWatches.add(new TriggeredWatch(ctx.id(), event));
@ -239,7 +241,7 @@ public final class ExecutionService extends AbstractComponent {
logger.warn("unable to find watch [{}] in watch index, perhaps it has been deleted", event.jobName());
continue;
}
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now);
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now, XContentType.JSON);
TriggeredExecutionContext ctx = new TriggeredExecutionContext(watch, now, event, defaultThrottlePeriod);
contexts.add(ctx);
triggeredWatches.add(new TriggeredWatch(ctx.id(), event));
@ -438,7 +440,7 @@ public final class ExecutionService extends AbstractComponent {
triggeredWatchStore.delete(triggeredWatch.id());
} else {
DateTime now = new DateTime(clock.millis(), UTC);
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now);
Watch watch = parser.parseWithSecrets(response.getId(), true, response.getSourceAsBytesRef(), now, XContentType.JSON);
TriggeredExecutionContext ctx =
new StartupExecutionContext(watch, now, triggeredWatch.triggerEvent(), defaultThrottlePeriod);
executeAsync(ctx, triggeredWatch);

View File

@ -74,7 +74,7 @@ public class RestExecuteWatchAction extends WatcherRestHandler implements RestRe
builder.setId(request.param("id"));
builder.setDebug(WatcherParams.debug(request));
if (request.content() == null || request.content().length() == 0) {
if (request.hasContent() == false) {
return builder.request();
}
@ -106,7 +106,7 @@ public class RestExecuteWatchAction extends WatcherRestHandler implements RestRe
} else if (Field.WATCH.match(currentFieldName)) {
XContentBuilder watcherSource = XContentBuilder.builder(parser.contentType().xContent());
XContentHelper.copyCurrentStructure(watcherSource.generator(), parser);
builder.setWatchSource(watcherSource.bytes());
builder.setWatchSource(watcherSource.bytes(), parser.contentType());
} else if (Field.ACTION_MODES.match(currentFieldName)) {
while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
if (token == XContentParser.Token.FIELD_NAME) {

View File

@ -43,7 +43,8 @@ public class RestPutWatchAction extends WatcherRestHandler implements RestReques
@Override
protected RestChannelConsumer doPrepareRequest(final RestRequest request, WatcherClient client) throws IOException {
PutWatchRequest putWatchRequest = new PutWatchRequest(request.param("id"), request.content());
PutWatchRequest putWatchRequest =
new PutWatchRequest(request.param("id"), request.content(), request.getXContentType());
putWatchRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putWatchRequest.masterNodeTimeout()));
putWatchRequest.setActive(request.paramAsBoolean("active", putWatchRequest.isActive()));
return channel -> client.putWatch(putWatchRequest, new RestBuilderListener<PutWatchResponse>(channel) {

View File

@ -19,6 +19,7 @@ import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.gateway.GatewayService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.xpack.security.InternalClient;
@ -169,7 +170,7 @@ public class WatcherIndexTemplateRegistry extends AbstractComponent implements C
final byte[] template = TemplateUtils.loadTemplate("/" + config.getFileName()+ ".json", INDEX_TEMPLATE_VERSION,
Pattern.quote("${xpack.watcher.template.version}")).getBytes(StandardCharsets.UTF_8);
PutIndexTemplateRequest request = new PutIndexTemplateRequest(config.getTemplateName()).source(template);
PutIndexTemplateRequest request = new PutIndexTemplateRequest(config.getTemplateName()).source(template, XContentType.JSON);
request.masterNodeTimeout(TimeValue.timeValueMinutes(1));
Settings customSettings = customIndexSettings.get(config.getSetting().getKey());
if (customSettings != null && customSettings.names().size() > 0) {

View File

@ -24,7 +24,7 @@ public final class WatcherUtils {
}
public static Map<String, Object> responseToData(ToXContentObject response) throws IOException {
return XContentHelper.convertToMap(XContentHelper.toXContent(response, XContentType.JSON, false), false).v2();
return XContentHelper.convertToMap(XContentHelper.toXContent(response, XContentType.JSON, false), false, XContentType.JSON).v2();
}
public static Map<String, Object> flattenModel(Map<String, Object> map) {

View File

@ -19,6 +19,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -74,7 +75,7 @@ public class TransportAckWatchAction extends WatcherTransportAction<AckWatchRequ
listener.onFailure(new ResourceNotFoundException("Watch with id [{}] does not exist", request.getWatchId()));
} else {
DateTime now = new DateTime(clock.millis(), UTC);
Watch watch = parser.parseWithSecrets(request.getWatchId(), true, response.getSourceAsBytesRef(), now);
Watch watch = parser.parseWithSecrets(request.getWatchId(), true, response.getSourceAsBytesRef(), now, XContentType.JSON);
watch.version(response.getVersion());
watch.status().version(response.getVersion());
String[] actionIds = request.getActionIds();

View File

@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -86,7 +87,8 @@ public class TransportActivateWatchAction extends WatcherTransportAction<Activat
client.update(updateRequest, ActionListener.wrap(updateResponse -> {
client.getWatch(request.getWatchId(), ActionListener.wrap(getResponse -> {
if (getResponse.isExists()) {
Watch watch = parser.parseWithSecrets(request.getWatchId(), true, getResponse.getSourceAsBytesRef(), now);
Watch watch = parser.parseWithSecrets(request.getWatchId(), true, getResponse.getSourceAsBytesRef(), now,
XContentType.JSON);
watch.version(getResponse.getVersion());
watch.status().version(getResponse.getVersion());

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.xpack.watcher.transport.actions.execute;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.master.MasterNodeReadRequest;
@ -12,6 +13,7 @@ import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode;
@ -37,6 +39,7 @@ public class ExecuteWatchRequest extends MasterNodeReadRequest<ExecuteWatchReque
@Nullable private Map<String, Object> alternativeInput = null;
private Map<String, ActionExecutionMode> actionModes = new HashMap<>();
private BytesReference watchSource;
private XContentType xContentType = XContentType.JSON;
private boolean debug = false;
@ -134,11 +137,26 @@ public class ExecuteWatchRequest extends MasterNodeReadRequest<ExecuteWatchReque
return watchSource;
}
public XContentType getXContentType() {
return xContentType;
}
/**
* @param watchSource instead of using an existing watch use this non persisted watch
* @deprecated use {@link #setWatchSource(BytesReference, XContentType)}
*/
@Deprecated
public void setWatchSource(BytesReference watchSource) {
this.watchSource = watchSource;
this.xContentType = XContentFactory.xContentType(watchSource);
}
/**
* @param watchSource instead of using an existing watch use this non persisted watch
*/
public void setWatchSource(BytesReference watchSource) {
public void setWatchSource(BytesReference watchSource, XContentType xContentType) {
this.watchSource = watchSource;
this.xContentType = xContentType;
}
/**
@ -146,6 +164,7 @@ public class ExecuteWatchRequest extends MasterNodeReadRequest<ExecuteWatchReque
*/
public void setWatchSource(WatchSourceBuilder watchSource) {
this.watchSource = watchSource.buildAsBytes(XContentType.JSON);
this.xContentType = XContentType.JSON;
}
/**
@ -231,6 +250,11 @@ public class ExecuteWatchRequest extends MasterNodeReadRequest<ExecuteWatchReque
}
if (in.readBoolean()) {
watchSource = in.readBytesReference();
if (in.getVersion().after(Version.V_5_3_0_UNRELEASED)) { // TODO update to onOrAfter after backporting
xContentType = XContentType.readFrom(in);
} else {
xContentType = XContentFactory.xContentType(watchSource);
}
}
debug = in.readBoolean();
}
@ -258,6 +282,9 @@ public class ExecuteWatchRequest extends MasterNodeReadRequest<ExecuteWatchReque
out.writeBoolean(watchSource != null);
if (watchSource != null) {
out.writeBytesReference(watchSource);
if (out.getVersion().after(Version.V_5_3_0_UNRELEASED)) { // TODO update to onOrAfter after backporting
xContentType.writeTo(out);
}
}
out.writeBoolean(debug);
}

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.transport.actions.execute;
import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder;
import org.elasticsearch.client.ElasticsearchClient;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode;
import org.elasticsearch.xpack.watcher.trigger.TriggerEvent;
@ -79,12 +80,22 @@ public class ExecuteWatchRequestBuilder extends MasterNodeOperationRequestBuilde
/**
* @param watchSource instead of using an existing watch use this non persisted watch
* @deprecated use {@link #setWatchSource(BytesReference, XContentType)}
*/
@Deprecated
public ExecuteWatchRequestBuilder setWatchSource(BytesReference watchSource) {
request.setWatchSource(watchSource);
return this;
}
/**
* @param watchSource instead of using an existing watch use this non persisted watch
*/
public ExecuteWatchRequestBuilder setWatchSource(BytesReference watchSource, XContentType xContentType) {
request.setWatchSource(watchSource, xContentType);
return this;
}
/**
* @param watchSource instead of using an existing watch use this non persisted watch
*/

View File

@ -89,7 +89,7 @@ public class TransportExecuteWatchAction extends WatcherTransportAction<ExecuteW
try {
// should be executed async in the future
GetResponse getResponse = client.getWatch(request.getId());
Watch watch = watchParser.parse(request.getId(), true, getResponse.getSourceAsBytesRef());
Watch watch = watchParser.parse(request.getId(), true, getResponse.getSourceAsBytesRef(), XContentType.JSON);
ExecuteWatchResponse executeWatchResponse = executeWatch(request, watch, true);
listener.onResponse(executeWatchResponse);
} catch (IOException e) {
@ -98,7 +98,8 @@ public class TransportExecuteWatchAction extends WatcherTransportAction<ExecuteW
} else if (request.getWatchSource() != null) {
try {
assert !request.isRecordExecution();
Watch watch = watchParser.parse(ExecuteWatchRequest.INLINE_WATCH_ID, true, request.getWatchSource());
Watch watch =
watchParser.parse(ExecuteWatchRequest.INLINE_WATCH_ID, true, request.getWatchSource(), request.getXContentType());
ExecuteWatchResponse response = executeWatch(request, watch, false);
listener.onResponse(response);
} catch (Exception e) {

View File

@ -76,7 +76,7 @@ public class TransportGetWatchAction extends WatcherTransportAction<GetWatchRequ
// we don't include the status in the watch source itself, but as a separate top level field, so that
// it indicates that the status is managed by watcher itself.
DateTime now = new DateTime(clock.millis(), UTC);
Watch watch = parser.parseWithSecrets(request.getId(), true, getResponse.getSourceAsBytesRef(), now);
Watch watch = parser.parseWithSecrets(request.getId(), true, getResponse.getSourceAsBytesRef(), now, XContentType.JSON);
watch.toXContent(builder, WatcherParams.builder()
.hideSecrets(true)
.put(Watch.INCLUDE_STATUS_KEY, false)

View File

@ -6,6 +6,7 @@
package org.elasticsearch.xpack.watcher.transport.actions.put;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.ValidateActions;
import org.elasticsearch.action.support.master.MasterNodeRequest;
@ -13,6 +14,7 @@ import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.watcher.client.WatchSourceBuilder;
import org.elasticsearch.xpack.watcher.watch.Watch;
@ -30,17 +32,24 @@ public class PutWatchRequest extends MasterNodeRequest<PutWatchRequest> {
private String id;
private BytesReference source;
private boolean active = true;
private XContentType xContentType = XContentType.JSON;
public PutWatchRequest() {
}
public PutWatchRequest(String id, WatchSourceBuilder source) {
this(id, source.buildAsBytes(XContentType.JSON));
this(id, source.buildAsBytes(XContentType.JSON), XContentType.JSON);
}
@Deprecated
public PutWatchRequest(String id, BytesReference source) {
this(id, source, source != null ? XContentFactory.xContentType(source) : null);
}
public PutWatchRequest(String id, BytesReference source, XContentType xContentType) {
this.id = id;
this.source = source;
this.xContentType = xContentType;
masterNodeTimeout(DEFAULT_TIMEOUT);
}
@ -69,14 +78,25 @@ public class PutWatchRequest extends MasterNodeRequest<PutWatchRequest> {
* Set the source of the watch
*/
public void setSource(WatchSourceBuilder source) {
setSource(source.buildAsBytes(XContentType.JSON));
setSource(source.buildAsBytes(XContentType.JSON), XContentType.JSON);
}
/**
* Set the source of the watch
* @deprecated use {@link #setSource(BytesReference, XContentType)}
*/
@Deprecated
public void setSource(BytesReference source) {
this.source = source;
this.xContentType = XContentFactory.xContentType(source);
}
/**
* Set the source of the watch
*/
public void setSource(BytesReference source) {
public void setSource(BytesReference source, XContentType xContentType) {
this.source = source;
this.xContentType = xContentType;
}
/**
@ -93,6 +113,13 @@ public class PutWatchRequest extends MasterNodeRequest<PutWatchRequest> {
this.active = active;
}
/**
* Get the content type for the source
*/
public XContentType xContentType() {
return xContentType;
}
@Override
public ActionRequestValidationException validate() {
ActionRequestValidationException validationException = null;
@ -113,6 +140,11 @@ public class PutWatchRequest extends MasterNodeRequest<PutWatchRequest> {
id = in.readString();
source = in.readBytesReference();
active = in.readBoolean();
if (in.getVersion().after(Version.V_5_3_0_UNRELEASED)) { // TODO update to onOrAfter after backporting
xContentType = XContentType.readFrom(in);
} else {
xContentType = XContentFactory.xContentType(source);
}
}
@Override
@ -121,6 +153,9 @@ public class PutWatchRequest extends MasterNodeRequest<PutWatchRequest> {
out.writeString(id);
out.writeBytesReference(source);
out.writeBoolean(active);
if (out.getVersion().after(Version.V_5_3_0_UNRELEASED)) { // TODO update to onOrAfter after backporting
xContentType.writeTo(out);
}
}
}

View File

@ -21,6 +21,7 @@ import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.XPackLicenseState;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportService;
@ -73,7 +74,7 @@ public class TransportPutWatchAction extends WatcherTransportAction<PutWatchRequ
try {
DateTime now = new DateTime(clock.millis(), UTC);
Watch watch = parser.parseWithSecrets(request.getId(), false, request.getSource(), now);
Watch watch = parser.parseWithSecrets(request.getId(), false, request.getSource(), now, request.xContentType());
watch.setState(request.isActive(), now);
try (XContentBuilder builder = jsonBuilder()) {
@ -82,7 +83,7 @@ public class TransportPutWatchAction extends WatcherTransportAction<PutWatchRequ
BytesReference bytesReference = builder.bytes();
IndexRequest indexRequest = new IndexRequest(Watch.INDEX).type(Watch.DOC_TYPE).id(request.getId());
indexRequest.source(bytesReference);
indexRequest.source(bytesReference, XContentType.JSON);
client.index(indexRequest, ActionListener.wrap(indexResponse -> {
boolean created = indexResponse.getResult() == DocWriteResponse.Result.CREATED;

View File

@ -17,7 +17,9 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.common.secret.Secret;
import org.elasticsearch.xpack.security.crypto.CryptoService;
import org.elasticsearch.xpack.support.clock.HaltedClock;
@ -219,12 +221,13 @@ public class Watch implements TriggerEngine.Job, ToXContentObject {
this.clock = clock;
}
public Watch parse(String name, boolean includeStatus, BytesReference source) throws IOException {
return parse(name, includeStatus, false, source, new DateTime(clock.millis(), UTC));
public Watch parse(String name, boolean includeStatus, BytesReference source, XContentType xContentType) throws IOException {
return parse(name, includeStatus, false, source, new DateTime(clock.millis(), UTC), xContentType);
}
public Watch parse(String name, boolean includeStatus, BytesReference source, DateTime now) throws IOException {
return parse(name, includeStatus, false, source, now);
public Watch parse(String name, boolean includeStatus, BytesReference source, DateTime now,
XContentType xContentType) throws IOException {
return parse(name, includeStatus, false, source, now, xContentType);
}
/**
@ -238,19 +241,21 @@ public class Watch implements TriggerEngine.Job, ToXContentObject {
* of the watch in the system will use secrets for sensitive data.
*
*/
public Watch parseWithSecrets(String id, boolean includeStatus, BytesReference source, DateTime now) throws IOException {
return parse(id, includeStatus, true, source, now);
public Watch parseWithSecrets(String id, boolean includeStatus, BytesReference source, DateTime now, XContentType xContentType)
throws IOException {
return parse(id, includeStatus, true, source, now, xContentType);
}
private Watch parse(String id, boolean includeStatus, boolean withSecrets, BytesReference source, DateTime now) throws IOException {
private Watch parse(String id, boolean includeStatus, boolean withSecrets, BytesReference source, DateTime now,
XContentType xContentType) throws IOException {
if (logger.isTraceEnabled()) {
logger.trace("parsing watch [{}] ", source.utf8ToString());
}
XContentParser parser = null;
try {
// EMPTY is safe here because we never use namedObject
parser = new WatcherXContentParser(createParser(NamedXContentRegistry.EMPTY, source), new HaltedClock(now),
withSecrets ? cryptoService : null);
parser = new WatcherXContentParser(xContentType.xContent().createParser(NamedXContentRegistry.EMPTY, source),
new HaltedClock(now), withSecrets ? cryptoService : null);
parser.nextToken();
return parse(id, includeStatus, parser);
} catch (IOException ioe) {
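
Callers of the Watch parser now state the format of the source bytes instead of letting the parser guess. A hedged usage sketch of the new signature; watchParser stands in for the injected parser instance, the watch id and body are placeholders, and clock/UTC are assumed to be the same statically imported values used in the surrounding code:

BytesReference source = new BytesArray("{\"trigger\":{\"schedule\":{\"interval\":\"5m\"}}}"); // illustrative watch body
DateTime now = new DateTime(clock.millis(), UTC);
// Secrets variant, as used by TransportPutWatchAction above; the last argument is the parsed REST content type.
Watch watch = watchParser.parseWithSecrets("my_watch", false, source, now, XContentType.JSON);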

View File

@ -16,6 +16,7 @@ import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.client.Response;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.SecurityIntegTestCase;
import org.elasticsearch.test.SecuritySettingsSource;
import org.elasticsearch.xpack.XPackSettings;
@ -41,7 +42,7 @@ public class BulkUpdateTests extends SecurityIntegTestCase {
public void testThatBulkUpdateDoesNotLoseFields() {
assertEquals(DocWriteResponse.Result.CREATED,
client().prepareIndex("index1", "type").setSource("{\"test\": \"test\"}").setId("1").get().getResult());
client().prepareIndex("index1", "type").setSource("{\"test\": \"test\"}", XContentType.JSON).setId("1").get().getResult());
GetResponse getResponse = internalCluster().transportClient().prepareGet("index1", "type", "1").get();
assertEquals("test", getResponse.getSource().get("test"));
@ -51,7 +52,7 @@ public class BulkUpdateTests extends SecurityIntegTestCase {
// update with a new field
assertEquals(DocWriteResponse.Result.UPDATED, internalCluster().transportClient().prepareUpdate("index1", "type", "1")
.setDoc("{\"not test\": \"not test\"}").get().getResult());
.setDoc("{\"not test\": \"not test\"}", XContentType.JSON).get().getResult());
getResponse = internalCluster().transportClient().prepareGet("index1", "type", "1").get();
assertEquals("test", getResponse.getSource().get("test"));
assertEquals("not test", getResponse.getSource().get("not test"));
@ -62,7 +63,7 @@ public class BulkUpdateTests extends SecurityIntegTestCase {
// do it in a bulk
BulkResponse response = internalCluster().transportClient().prepareBulk().add(client().prepareUpdate("index1", "type", "1")
.setDoc("{\"bulk updated\": \"bulk updated\"}")).get();
.setDoc("{\"bulk updated\": \"bulk updated\"}", XContentType.JSON)).get();
assertEquals(DocWriteResponse.Result.UPDATED, response.getItems()[0].getResponse().getResult());
getResponse = internalCluster().transportClient().prepareGet("index1", "type", "1").get();
assertEquals("test", getResponse.getSource().get("test"));

View File

@ -15,6 +15,7 @@ import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.update.UpdateResponse;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.Requests;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.query.QueryBuilders;
@ -87,7 +88,7 @@ public class DateMathExpressionIntegTests extends SecurityIntegTestCase {
assertThat(multiSearchResponse.getResponses()[0].getResponse().getHits().getTotalHits(), is(1L));
UpdateResponse updateResponse = client.prepareUpdate(expression, "type", response.getId())
.setDoc("new", "field")
.setDoc(Requests.INDEX_CONTENT_TYPE, "new", "field")
.setRefreshPolicy(refeshOnOperation ? IMMEDIATE : NONE)
.get();
assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult());

View File

@ -17,6 +17,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.query.QueryBuilder;
@ -794,7 +795,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
// With document level security enabled the update is not allowed:
try {
client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
.prepareUpdate("test", "type", "1").setDoc("field1", "value2")
.prepareUpdate("test", "type", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value2")
.get();
fail("failed, because update request shouldn't be allowed if document level security is enabled");
} catch (ElasticsearchSecurityException e) {
@ -804,7 +805,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field1").toString(), equalTo("value1"));
// With no document level security enabled the update is allowed:
client().prepareUpdate("test", "type", "1").setDoc("field1", "value2")
client().prepareUpdate("test", "type", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value2")
.get();
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field1").toString(), equalTo("value2"));
@ -812,7 +813,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
BulkResponse bulkResponse = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue
("user1", USERS_PASSWD)))
.prepareBulk()
.add(new UpdateRequest("test", "type", "1").doc("field1", "value3"))
.add(new UpdateRequest("test", "type", "1").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value3"))
.get();
assertEquals(1, bulkResponse.getItems().length);
BulkItemResponse bulkItem = bulkResponse.getItems()[0];
@ -826,7 +827,7 @@ public class DocumentLevelSecurityTests extends SecurityIntegTestCase {
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field1").toString(), equalTo("value2"));
client().prepareBulk()
.add(new UpdateRequest("test", "type", "1").doc("field1", "value3"))
.add(new UpdateRequest("test", "type", "1").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value3"))
.get();
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field1").toString(), equalTo("value3"));
}
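
These security tests now pass an explicit content type whenever a doc is built from key/value pairs; Requests.INDEX_CONTENT_TYPE is the default content type used for generated index sources (JSON), so behaviour is unchanged but nothing is auto-detected any more. A minimal sketch of the pattern, with illustrative index, type and field names:

// Update through the client builder:
client().prepareUpdate("test", "type", "1")
        .setDoc(Requests.INDEX_CONTENT_TYPE, "field1", "value2")
        .get();

// Same idea inside a bulk request:
client().prepareBulk()
        .add(new UpdateRequest("test", "type", "1").doc(Requests.INDEX_CONTENT_TYPE, "field1", "value3"))
        .get();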

View File

@ -18,8 +18,10 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsResponse;
import org.elasticsearch.action.termvectors.TermVectorsRequest;
import org.elasticsearch.action.termvectors.TermVectorsResponse;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.indices.IndicesRequestCache;
@ -1283,7 +1285,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
ensureGreen();
// index simple data
client().prepareIndex("test", "parent", "p1").setSource("{}").get();
client().prepareIndex("test", "parent", "p1").setSource("{}", XContentType.JSON).get();
client().prepareIndex("test", "child", "c1").setSource("field1", "red").setParent("p1").get();
client().prepareIndex("test", "child", "c2").setSource("field1", "yellow").setParent("p1").get();
refresh();
@ -1317,7 +1319,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
// With field level security enabled the update is not allowed:
try {
client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD)))
.prepareUpdate("test", "type", "1").setDoc("field2", "value2")
.prepareUpdate("test", "type", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field2", "value2")
.get();
fail("failed, because update request shouldn't be allowed if field level security is enabled");
} catch (ElasticsearchSecurityException e) {
@ -1327,7 +1329,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field2").toString(), equalTo("value1"));
// With no field level security enabled the update is allowed:
client().prepareUpdate("test", "type", "1").setDoc("field2", "value2")
client().prepareUpdate("test", "type", "1").setDoc(Requests.INDEX_CONTENT_TYPE, "field2", "value2")
.get();
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field2").toString(), equalTo("value2"));
@ -1335,7 +1337,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
BulkResponse bulkResponse = client().filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue
("user1", USERS_PASSWD)))
.prepareBulk()
.add(new UpdateRequest("test", "type", "1").doc("field2", "value3"))
.add(new UpdateRequest("test", "type", "1").doc(Requests.INDEX_CONTENT_TYPE, "field2", "value3"))
.get();
assertEquals(1, bulkResponse.getItems().length);
BulkItemResponse bulkItem = bulkResponse.getItems()[0];
@ -1349,7 +1351,7 @@ public class FieldLevelSecurityTests extends SecurityIntegTestCase {
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field2").toString(), equalTo("value2"));
client().prepareBulk()
.add(new UpdateRequest("test", "type", "1").doc("field2", "value3"))
.add(new UpdateRequest("test", "type", "1").doc(Requests.INDEX_CONTENT_TYPE, "field2", "value3"))
.get();
assertThat(client().prepareGet("test", "type", "1").get().getSource().get("field2").toString(), equalTo("value3"));
}

View File

@ -12,6 +12,7 @@ import org.elasticsearch.action.search.ClearScrollResponse;
import org.elasticsearch.action.search.MultiSearchRequestBuilder;
import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchPhaseExecutionException;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.Security;
import org.elasticsearch.xpack.security.authc.support.Hasher;
import org.elasticsearch.xpack.security.authc.support.SecuredString;
@ -66,7 +67,8 @@ public class SecurityClearScrollTests extends SecurityIntegTestCase {
public void indexRandomDocuments() {
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk().setRefreshPolicy(IMMEDIATE);
for (int i = 0; i < randomIntBetween(10, 50); i++) {
bulkRequestBuilder.add(client().prepareIndex("index", "type", String.valueOf(i)).setSource("{ \"foo\" : \"bar\" }"));
bulkRequestBuilder.add(client().prepareIndex("index", "type",
String.valueOf(i)).setSource("{ \"foo\" : \"bar\" }", XContentType.JSON));
}
BulkResponse bulkItemResponses = bulkRequestBuilder.get();
assertThat(bulkItemResponses.hasFailures(), is(false));

View File

@ -5,6 +5,7 @@
*/
package org.elasticsearch.license;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
@ -26,7 +27,7 @@ public class LicenseSerializationTests extends ESTestCase {
String issueDate = TestUtils.dateMathString("now", now);
String expiryDate = TestUtils.dateMathString("now+10d/d", now);
String licenseSpecs = TestUtils.generateLicenseSpecString(new TestUtils.LicenseSpec(issueDate, expiryDate));
License generatedLicense = License.fromSource(licenseSpecs.getBytes(StandardCharsets.UTF_8));
License generatedLicense = License.fromSource(new BytesArray(licenseSpecs.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
assertThat(generatedLicense.issueDate(), equalTo(DateUtils.beginningOfTheDay(issueDate)));
assertThat(generatedLicense.expiryDate(), equalTo(DateUtils.endOfTheDay(expiryDate)));
}
@ -34,7 +35,8 @@ public class LicenseSerializationTests extends ESTestCase {
public void testLicensesFields() throws Exception {
TestUtils.LicenseSpec randomLicenseSpec = TestUtils.generateRandomLicenseSpec(License.VERSION_START);
String licenseSpecsSource = TestUtils.generateLicenseSpecString(randomLicenseSpec);
final License fromSource = License.fromSource(licenseSpecsSource.getBytes(StandardCharsets.UTF_8));
final License fromSource =
License.fromSource(new BytesArray(licenseSpecsSource.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
TestUtils.assertLicenseSpec(randomLicenseSpec, fromSource);
}
@ -49,7 +51,7 @@ public class LicenseSerializationTests extends ESTestCase {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
license.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap(License.REST_VIEW_MODE, "true")));
builder.flush();
Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false).v2();
Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
// should have an extra status field, human readable issue_date and expiry_date
assertThat(map.get("status"), notNullValue());
@ -59,14 +61,14 @@ public class LicenseSerializationTests extends ESTestCase {
builder = XContentFactory.contentBuilder(XContentType.JSON);
license.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.flush();
map = XContentHelper.convertToMap(builder.bytes(), false).v2();
map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
assertThat(map.get("status"), nullValue());
license = TestUtils.generateLicenses(new TestUtils.LicenseSpec(validLicenseIssueDate, validLicenseExpiryDate));
builder = XContentFactory.contentBuilder(XContentType.JSON);
license.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap(License.REST_VIEW_MODE, "true")));
builder.flush();
map = XContentHelper.convertToMap(builder.bytes(), false).v2();
map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
// should have an extra status field, human readable issue_date and expiry_date
assertThat(map.get("status"), notNullValue());
@ -76,14 +78,14 @@ public class LicenseSerializationTests extends ESTestCase {
builder = XContentFactory.contentBuilder(XContentType.JSON);
license.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.flush();
map = XContentHelper.convertToMap(builder.bytes(), false).v2();
map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
assertThat(map.get("status"), nullValue());
license = TestUtils.generateLicenses(new TestUtils.LicenseSpec(invalidLicenseIssueDate, validLicenseExpiryDate));
builder = XContentFactory.contentBuilder(XContentType.JSON);
license.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap(License.REST_VIEW_MODE, "true")));
builder.flush();
map = XContentHelper.convertToMap(builder.bytes(), false).v2();
map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
// should have an extra status field, human readable issue_date and expiry_date
assertThat(map.get("status"), notNullValue());
@ -93,7 +95,7 @@ public class LicenseSerializationTests extends ESTestCase {
builder = XContentFactory.contentBuilder(XContentType.JSON);
license.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.flush();
map = XContentHelper.convertToMap(builder.bytes(), false).v2();
map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
assertThat(map.get("status"), nullValue());
}
}
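
The repeated change in this test is that XContentHelper.convertToMap no longer auto-detects the format and instead takes the builder's own content type. A minimal sketch of the new call shape; the field and value are illustrative:

XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.startObject().field("status", "active").endObject();
builder.flush();
// The third argument replaces the old content-type sniffing.
Map<String, Object> map = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2();
assertThat(map.get("status"), equalTo("active"));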

View File

@ -6,14 +6,17 @@
package org.elasticsearch.license;
import org.elasticsearch.action.ActionFuture;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.node.Node;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.xpack.XPackPlugin;
import org.elasticsearch.xpack.XPackSettings;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Collections;
import java.util.UUID;
@ -84,7 +87,8 @@ public class LicensesTransportTests extends ESSingleNodeTestCase {
// put license source
PutLicenseRequestBuilder putLicenseRequestBuilder =
new PutLicenseRequestBuilder(client().admin().cluster(), PutLicenseAction.INSTANCE).setLicense(licenseString)
new PutLicenseRequestBuilder(client().admin().cluster(), PutLicenseAction.INSTANCE)
.setLicense(new BytesArray(licenseString.getBytes(StandardCharsets.UTF_8)), XContentType.JSON)
.setAcknowledge(true);
PutLicenseResponse putLicenseResponse = putLicenseRequestBuilder.get();
assertThat(putLicenseResponse.isAcknowledged(), equalTo(true));

View File

@ -36,7 +36,7 @@ public class PutLicenseResponseTests extends ESTestCase {
response.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS);
contentBuilder.endObject();
Map<String, Object> map = XContentHelper.convertToMap(contentBuilder.bytes(), false).v2();
Map<String, Object> map = XContentHelper.convertToMap(contentBuilder.bytes(), false, contentBuilder.contentType()).v2();
assertThat(map.containsKey("acknowledged"), equalTo(true));
boolean actualAcknowledged = (boolean) map.get("acknowledged");
assertThat(actualAcknowledged, equalTo(acknowledged));

View File

@ -7,6 +7,7 @@ package org.elasticsearch.xpack.monitoring;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.TribeTransportTestCase;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkAction;
import org.elasticsearch.xpack.monitoring.action.MonitoringBulkDoc;
@ -40,7 +41,7 @@ public class MonitoringTribeTests extends TribeTransportTestCase {
private static void assertMonitoringTransportActionsWorks(Client client) throws Exception {
MonitoringBulkDoc doc = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
doc.setType(randomAsciiOfLength(5));
doc.setSource(new BytesArray("{\"key\" : \"value\"}"));
doc.setSource(new BytesArray("{\"key\" : \"value\"}"), XContentType.JSON);
client.execute(MonitoringBulkAction.INSTANCE, new MonitoringBulkRequest());
}

View File

@ -9,10 +9,12 @@ import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.monitoring.exporter.MonitoringDoc;
import java.io.IOException;
import java.util.Base64;
import java.util.HashMap;
import java.util.Map;
@ -37,7 +39,7 @@ public class MonitoringBulkDocTests extends ESTestCase {
doc.writeTo(output);
StreamInput streamInput = output.bytes().streamInput();
streamInput.setVersion(randomVersion(random()));
streamInput.setVersion(outputVersion);
MonitoringBulkDoc doc2 = new MonitoringBulkDoc(streamInput);
assertThat(doc2.getMonitoringId(), equalTo(doc.getMonitoringId()));
@ -48,6 +50,7 @@ public class MonitoringBulkDocTests extends ESTestCase {
assertThat(doc2.getIndex(), equalTo(doc.getIndex()));
assertThat(doc2.getType(), equalTo(doc.getType()));
assertThat(doc2.getId(), equalTo(doc.getId()));
assertThat(doc2.getXContentType(), equalTo(doc.getXContentType()));
if (doc.getSource() == null) {
assertThat(doc2.getSource(), equalTo(BytesArray.EMPTY));
} else {
@ -56,6 +59,29 @@ public class MonitoringBulkDocTests extends ESTestCase {
}
}
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("AQNtSWQBBTUuMS4yAAAAAQEEdHlwZQECaWQNeyJmb28iOiJiYXIifQAAAAAAAAAA");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
MonitoringBulkDoc bulkDoc = new MonitoringBulkDoc(in);
assertEquals(XContentType.JSON, bulkDoc.getXContentType());
assertEquals("mId", bulkDoc.getMonitoringId());
assertEquals("5.1.2", bulkDoc.getMonitoringVersion());
assertEquals(MonitoringIndex.TIMESTAMPED, bulkDoc.getIndex());
assertEquals("{\"foo\":\"bar\"}", bulkDoc.getSource().utf8ToString());
assertEquals("type", bulkDoc.getType());
assertEquals("id", bulkDoc.getId());
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(version);
bulkDoc.writeTo(out);
assertArrayEquals(data, out.bytes().toBytesRef().bytes);
}
}
}
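
The Base64 blob in testSerializationBwc is a pre-5.3 wire image of a monitoring doc. A hedged sketch of how such a fixture can be produced, assuming the seven-argument MonitoringBulkDoc constructor shown later in this diff; the exact bytes depend on the old wire format, so treat this as the recipe rather than a guarantee of that particular string:

MonitoringBulkDoc doc = new MonitoringBulkDoc("mId", "5.1.2", MonitoringIndex.TIMESTAMPED,
        "type", "id", new BytesArray("{\"foo\":\"bar\"}"), XContentType.JSON);
try (BytesStreamOutput out = new BytesStreamOutput()) {
    out.setVersion(Version.V_5_1_2_UNRELEASED);  // serialize with the old wire format (no content type on the wire)
    doc.writeTo(out);
    String fixture = Base64.getEncoder().encodeToString(out.bytes().toBytesRef().bytes);
    // paste `fixture` into the test as the `data` constant
}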
private MonitoringBulkDoc newRandomMonitoringBulkDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
if (frequently()) {
@ -64,7 +90,7 @@ public class MonitoringBulkDocTests extends ESTestCase {
}
if (randomBoolean()) {
doc.setTimestamp(System.currentTimeMillis());
doc.setSource(new BytesArray("{\"key\" : \"value\"}"));
doc.setSource(new BytesArray("{\"key\" : \"value\"}"), XContentType.JSON);
}
if (rarely()) {
doc.setIndex(MonitoringIndex.DATA);

View File

@ -53,7 +53,7 @@ public class MonitoringBulkRequestTests extends ESTestCase {
doc.setType("type");
assertValidationErrors(new MonitoringBulkRequest().add(doc), hasItems("source is missing for monitoring document [0]"));
doc.setSource(SOURCE);
doc.setSource(SOURCE, XContentType.JSON);
assertValidationErrors(new MonitoringBulkRequest().add(doc), nullValue());
}
@ -64,30 +64,30 @@ public class MonitoringBulkRequestTests extends ESTestCase {
// Doc0 is complete
MonitoringBulkDoc doc0 = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
doc0.setType(randomAsciiOfLength(5));
doc0.setSource(SOURCE);
doc0.setSource(SOURCE, XContentType.JSON);
request.add(doc0);
// Doc1 has no type
MonitoringBulkDoc doc1 = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
doc1.setSource(SOURCE);
doc1.setSource(SOURCE, XContentType.JSON);
request.add(doc1);
// Doc2 has no source
MonitoringBulkDoc doc2 = new MonitoringBulkDoc(randomAsciiOfLength(2), randomAsciiOfLength(2));
doc2.setType(randomAsciiOfLength(5));
doc2.setSource(BytesArray.EMPTY);
doc2.setSource(BytesArray.EMPTY, XContentType.JSON);
request.add(doc2);
// Doc3 has no version
MonitoringBulkDoc doc3 = new MonitoringBulkDoc(randomAsciiOfLength(2), null);
doc3.setType(randomAsciiOfLength(5));
doc3.setSource(SOURCE);
doc3.setSource(SOURCE, XContentType.JSON);
request.add(doc3);
// Doc4 has no id
MonitoringBulkDoc doc4 = new MonitoringBulkDoc(null, randomAsciiOfLength(2));
doc4.setType(randomAsciiOfLength(5));
doc4.setSource(SOURCE);
doc4.setSource(SOURCE, XContentType.JSON);
request.add(doc4);
assertValidationErrors(request, hasItems("type is missing for monitoring document [1]",
@ -139,7 +139,7 @@ public class MonitoringBulkRequestTests extends ESTestCase {
String defaultType = rarely() ? randomAsciiOfLength(4) : null;
MonitoringBulkRequest request = new MonitoringBulkRequest();
request.add(content.bytes(), defaultMonitoringId, defaultMonitoringVersion, defaultType);
request.add(content.bytes(), defaultMonitoringId, defaultMonitoringVersion, defaultType, xContentType);
assertThat(request.getDocs(), hasSize(nbDocs));
i = 0;
@ -164,7 +164,7 @@ public class MonitoringBulkRequestTests extends ESTestCase {
for (int i = 0; i < numDocs; i++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(randomAsciiOfLength(2), randomVersion(random()).toString());
doc.setType(randomFrom("type1", "type2", "type3"));
doc.setSource(SOURCE);
doc.setSource(SOURCE, XContentType.JSON);
if (randomBoolean()) {
doc.setIndex(MonitoringIndex.DATA);
}

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.monitoring.action;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.xpack.monitoring.MonitoredSystem;
import org.elasticsearch.xpack.monitoring.exporter.MonitoringTemplateUtils;
@ -43,7 +44,7 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
for (int i = 0; i < numDocs; i++) {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", numDocs).endObject().bytes());
doc.setSource(jsonBuilder().startObject().field("num", numDocs).endObject().bytes(), XContentType.JSON);
requestBuilder.add(doc);
}
@ -96,7 +97,7 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
MonitoringBulkDoc doc =
new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
doc.setType("concurrent");
doc.setSource(jsonBuilder().startObject().field("num", k).endObject().bytes());
doc.setSource(jsonBuilder().startObject().field("num", k).endObject().bytes(), XContentType.JSON);
requestBuilder.add(doc);
}
@ -138,7 +139,7 @@ public class MonitoringBulkTests extends MonitoringIntegTestCase {
doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION);
}
doc.setType(randomFrom(types));
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes());
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes(), XContentType.JSON);
requestBuilder.add(doc);
}

View File

@ -20,6 +20,7 @@ import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.transport.CapturingTransport;
@ -245,7 +246,7 @@ public class TransportMonitoringBulkActionTests extends ESTestCase {
MonitoringBulkDoc doc = new MonitoringBulkDoc(randomFrom(MonitoredSystem.values()).getSystem(),
randomVersion(random()).toString());
doc.setType(randomFrom("type1", "type2"));
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes());
doc.setSource(jsonBuilder().startObject().field("num", i).endObject().bytes(), XContentType.JSON);
request.add(doc);
}
return request;

View File

@ -337,7 +337,7 @@ public class HttpExporterIT extends MonitoringIntegTestCase {
String indexName = new ResolversRegistry(Settings.EMPTY).getResolver(doc).index(doc);
byte[] bytes = recordedRequest.getBody().getBytes(StandardCharsets.UTF_8);
Map<String, Object> data = XContentHelper.convertToMap(new BytesArray(bytes), false).v2();
Map<String, Object> data = XContentHelper.convertToMap(new BytesArray(bytes), false, XContentType.JSON).v2();
@SuppressWarnings("unchecked")
Map<String, Object> index = (Map<String, Object>) data.get("index");
assertThat(index.get("_index"), equalTo(indexName));
@ -360,7 +360,7 @@ public class HttpExporterIT extends MonitoringIntegTestCase {
recordedRequest = assertBulk(webServer);
bytes = recordedRequest.getBody().getBytes(StandardCharsets.UTF_8);
data = XContentHelper.convertToMap(new BytesArray(bytes), false).v2();
data = XContentHelper.convertToMap(new BytesArray(bytes), false, XContentType.JSON).v2();
@SuppressWarnings("unchecked")
final Map<String, Object> newIndex = (Map<String, Object>) data.get("index");
assertThat(newIndex.get("_index"), equalTo(expectedMonitoringIndex));

View File

@ -217,7 +217,8 @@ public class LocalExporterTemplateTests extends MonitoringIntegTestCase {
private void putPipeline(String name) throws Exception {
waitNoPendingTasksOnAll();
assertAcked(client().admin().cluster().preparePutPipeline(name, Exporter.emptyPipeline(XContentType.JSON).bytes()).get());
assertAcked(client().admin().cluster().preparePutPipeline(name, Exporter.emptyPipeline(XContentType.JSON).bytes(),
XContentType.JSON).get());
}
private void assertTemplateExists(String name) throws Exception {

View File

@ -125,11 +125,12 @@ public abstract class MonitoringIndexNameResolverTestCase<M extends MonitoringDo
@SuppressWarnings("unchecked")
public void testSource() throws IOException {
MonitoringIndexNameResolver resolver = newResolver();
BytesReference source = resolver.source(newMonitoringDoc(), randomFrom(XContentType.values()));
final XContentType xContentType = randomFrom(XContentType.values());
BytesReference source = resolver.source(newMonitoringDoc(), xContentType);
assertNotNull(source);
assertThat(source.length(), greaterThan(0));
assertSource(source, resolver.filters());
assertSource(source, resolver.filters(), xContentType);
}
@SuppressWarnings("unchecked")
@ -150,8 +151,8 @@ public abstract class MonitoringIndexNameResolverTestCase<M extends MonitoringDo
}
}
protected void assertSource(BytesReference source, Set<String> fields) {
Map<String, Object> sourceFields = XContentHelper.convertToMap(source, false).v2();
protected void assertSource(BytesReference source, Set<String> fields, XContentType xContentType) {
Map<String, Object> sourceFields = XContentHelper.convertToMap(source, false, xContentType).v2();
assertNotNull(sourceFields);
String[] commons = new String[]{

View File

@ -32,7 +32,7 @@ public class MonitoringBulkDataResolverTests extends MonitoringIndexNameResolver
protected MonitoringBulkDoc newMonitoringDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION,
MonitoringIndex.DATA, "kibana", id,
new BytesArray("{\"field1\" : \"value1\"}"));
new BytesArray("{\"field1\" : \"value1\"}"), XContentType.JSON);
if (randomBoolean()) {
doc.setClusterUUID(randomAsciiOfLength(5));
@ -70,6 +70,6 @@ public class MonitoringBulkDataResolverTests extends MonitoringIndexNameResolver
"timestamp",
"source_node",
"kibana",
"kibana.field1"));
"kibana.field1"), XContentType.JSON);
}
}

View File

@ -31,7 +31,7 @@ public class MonitoringBulkTimestampedResolverTests
protected MonitoringBulkDoc newMonitoringDoc() {
MonitoringBulkDoc doc = new MonitoringBulkDoc(MonitoredSystem.KIBANA.getSystem(), MonitoringTemplateUtils.TEMPLATE_VERSION,
MonitoringIndex.TIMESTAMPED, "kibana_stats", null,
new BytesArray("{\"field1\" : \"value1\"}"));
new BytesArray("{\"field1\" : \"value1\"}"), XContentType.JSON);
doc.setTimestamp(1437580442979L);
if (randomBoolean()) {
@ -70,6 +70,6 @@ public class MonitoringBulkTimestampedResolverTests
"timestamp",
"source_node",
"kibana_stats",
"kibana_stats.field1"));
"kibana_stats.field1"), XContentType.JSON);
}
}

View File

@ -85,6 +85,6 @@ public class ClusterInfoResolverTests extends MonitoringIndexNameResolverTestCas
"version",
"license",
"cluster_stats",
"stack_stats.xpack"));
"stack_stats.xpack"), XContentType.JSON);
}
}

View File

@ -64,6 +64,6 @@ public class ClusterStateNodeResolverTests extends
"timestamp",
"source_node",
"state_uuid",
"node.id"));
"node.id"), XContentType.JSON);
}
}

View File

@ -62,6 +62,6 @@ public class ClusterStateResolverTests extends MonitoringIndexNameResolverTestCa
"cluster_uuid",
"timestamp",
"source_node",
"cluster_state"));
"cluster_state"), XContentType.JSON);
}
}

View File

@ -88,7 +88,7 @@ public class ClusterStatsResolverTests extends MonitoringIndexNameResolverTestCa
"cluster_uuid",
"timestamp",
"source_node",
"cluster_stats"));
"cluster_stats"), XContentType.JSON);
}
/**

View File

@ -71,6 +71,6 @@ public class IndexRecoveryResolverTests extends MonitoringIndexNameResolverTestC
"cluster_uuid",
"timestamp",
"source_node",
"index_recovery"));
"index_recovery"), XContentType.JSON);
}
}

View File

@ -72,7 +72,7 @@ public class IndexStatsResolverTests extends MonitoringIndexNameResolverTestCase
"cluster_uuid",
"timestamp",
"source_node",
"index_stats"));
"index_stats"), XContentType.JSON);
}
/**

View File

@ -71,7 +71,8 @@ public class IndicesStatsResolverTests extends MonitoringIndexNameResolverTestCa
assertThat(resolver.type(doc), equalTo(IndicesStatsResolver.TYPE));
assertThat(resolver.id(doc), nullValue());
assertSource(resolver.source(doc, XContentType.JSON), Sets.newHashSet("cluster_uuid", "timestamp", "source_node", "indices_stats"));
assertSource(resolver.source(doc, XContentType.JSON),
Sets.newHashSet("cluster_uuid", "timestamp", "source_node", "indices_stats"), XContentType.JSON);
}
/**

View File

@ -57,6 +57,6 @@ public class DiscoveryNodeResolverTests extends MonitoringIndexNameResolverTestC
"node.id",
"node.name",
"node.transport_address",
"node.attributes"));
"node.attributes"), XContentType.JSON);
}
}

View File

@ -123,7 +123,7 @@ public class NodeStatsResolverTests extends MonitoringIndexNameResolverTestCase<
"cluster_uuid",
"timestamp",
"source_node",
"node_stats"));
"node_stats"), XContentType.JSON);
}
/**

View File

@ -69,7 +69,7 @@ public class ShardsResolverTests extends MonitoringIndexNameResolverTestCase<Sha
"shard.node",
"shard.relocating_node",
"shard.shard",
"shard.index"));
"shard.index"), XContentType.JSON);
final String index = "test-" + randomIntBetween(0, 100);
final int shardId = randomIntBetween(0, 500);
@ -101,7 +101,7 @@ public class ShardsResolverTests extends MonitoringIndexNameResolverTestCase<Sha
"shard.node",
"shard.relocating_node",
"shard.shard",
"shard.index"));
"shard.index"), XContentType.JSON);
}
public void testShardId() {

View File

@ -6,8 +6,10 @@
package org.elasticsearch.xpack.monitoring.security;
import org.elasticsearch.action.ActionRequestBuilder;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.monitoring.MonitoringSettings;
import org.elasticsearch.xpack.monitoring.test.MonitoringIntegTestCase;
import org.elasticsearch.xpack.security.InternalClient;
@ -44,13 +46,15 @@ public class MonitoringInternalClientTests extends MonitoringIntegTestCase {
assertAccessIsAllowed(internalClient.admin().indices().prepareDelete(MONITORING_INDICES_PREFIX + "*"));
assertAccessIsAllowed(internalClient.admin().indices().prepareCreate(MONITORING_INDICES_PREFIX + "test"));
assertAccessIsAllowed(internalClient.admin().indices().preparePutTemplate("foo").setSource(randomTemplateSource()));
assertAccessIsAllowed(internalClient.admin().indices().preparePutTemplate("foo")
.setSource(new BytesArray(randomTemplateSource()), XContentType.JSON));
assertAccessIsAllowed(internalClient.admin().indices().prepareGetTemplates("foo"));
}
public void testAllowAllAccess() {
InternalClient internalClient = internalCluster().getInstance(InternalClient.class);
assertAcked(internalClient.admin().indices().preparePutTemplate("foo").setSource(randomTemplateSource()).get());
assertAcked(internalClient.admin().indices().preparePutTemplate("foo")
.setSource(new BytesArray(randomTemplateSource()), XContentType.JSON).get());
assertAccessIsAllowed(internalClient.admin().indices().prepareDeleteTemplate("foo"));
assertAccessIsAllowed(internalClient.admin().cluster().prepareGetRepositories());

View File

@ -33,6 +33,7 @@ import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
@ -330,7 +331,7 @@ public class SecurityTemplateServiceTests extends ESTestCase {
String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString()
, SECURITY_INDEX_TEMPLATE_VERSION_PATTERN);
PutIndexTemplateRequest request = new PutIndexTemplateRequest();
request.source(template);
request.source(template, XContentType.JSON);
IndexMetaData.Builder indexMetaData = IndexMetaData.builder(SECURITY_INDEX_NAME);
indexMetaData.settings(Settings.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
@ -360,7 +361,7 @@ public class SecurityTemplateServiceTests extends ESTestCase {
String template = TemplateUtils.loadTemplate(templateString, Version.CURRENT.toString()
, SECURITY_INDEX_TEMPLATE_VERSION_PATTERN);
PutIndexTemplateRequest request = new PutIndexTemplateRequest();
request.source(template);
request.source(template, XContentType.JSON);
IndexTemplateMetaData.Builder templateBuilder = IndexTemplateMetaData.builder(SECURITY_TEMPLATE_NAME);
for (Map.Entry<String, String> entry : request.mappings().entrySet()) {
templateBuilder.putMapping(entry.getKey(), entry.getValue());

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security.action.role;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.client.NoOpClient;
@ -24,8 +25,8 @@ public class PutRoleBuilderTests extends ESTestCase {
byte[] bytes = Files.readAllBytes(path);
String roleString = new String(bytes, Charset.defaultCharset());
try (Client client = new NoOpClient("testBWCFieldPermissions")) {
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> new PutRoleRequestBuilder(client)
.source("role1", new BytesArray(roleString)));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> new PutRoleRequestBuilder(client).source("role1", new BytesArray(roleString), XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("\"fields\": [...]] format has changed for field permissions in role " +
"[role1], use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
}

View File

@ -8,6 +8,7 @@ package org.elasticsearch.xpack.security.action.user;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import java.io.IOException;
@ -32,7 +33,7 @@ public class PutUserRequestBuilderTests extends ESTestCase {
"}";
PutUserRequestBuilder builder = new PutUserRequestBuilder(mock(Client.class));
builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)));
builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
PutUserRequest request = builder.request();
assertThat(request.username(), is("kibana4"));
@ -52,7 +53,7 @@ public class PutUserRequestBuilderTests extends ESTestCase {
"}";
PutUserRequestBuilder builder = new PutUserRequestBuilder(mock(Client.class));
builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)));
builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
PutUserRequest request = builder.request();
assertThat(request.username(), is("kibana4"));
@ -73,7 +74,7 @@ public class PutUserRequestBuilderTests extends ESTestCase {
"}";
PutUserRequestBuilder builder = new PutUserRequestBuilder(mock(Client.class));
builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)));
builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
PutUserRequest request = builder.request();
assertThat(request.username(), is("kibana4"));
@ -95,7 +96,7 @@ public class PutUserRequestBuilderTests extends ESTestCase {
PutUserRequestBuilder builder = new PutUserRequestBuilder(mock(Client.class));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8))));
() -> builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON));
assertThat(e.getMessage(), containsString("expected field [full_name] to be of type string"));
}
@ -111,7 +112,7 @@ public class PutUserRequestBuilderTests extends ESTestCase {
PutUserRequestBuilder builder = new PutUserRequestBuilder(mock(Client.class));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8))));
() -> builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON));
assertThat(e.getMessage(), containsString("expected field [email] to be of type string"));
}
@ -127,7 +128,8 @@ public class PutUserRequestBuilderTests extends ESTestCase {
"}";
PutUserRequestBuilder builder = new PutUserRequestBuilder(mock(Client.class));
PutUserRequest request = builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8))).request();
PutUserRequest request =
builder.source("kibana4", new BytesArray(json.getBytes(StandardCharsets.UTF_8)), XContentType.JSON).request();
assertFalse(request.enabled());
}
}

View File

@ -14,12 +14,17 @@ import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.network.NetworkAddress;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.transport.TransportAddress;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestRequest;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.test.rest.FakeRestRequest.Builder;
import org.elasticsearch.transport.TransportMessage;
import org.elasticsearch.xpack.security.audit.AuditUtil;
import org.elasticsearch.xpack.security.authc.AuthenticationToken;
@ -33,6 +38,7 @@ import org.junit.Before;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
@ -147,11 +153,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
public void testAnonymousAccessDeniedRest() throws Exception {
RestRequest request = mock(RestRequest.class);
InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1");
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
Tuple<RestContent, RestRequest> tuple = prepareRestContent("_uri", new InetSocketAddress(address, 9200));
String expectedMessage = tuple.v1().expectedMessage();
RestRequest request = tuple.v2();
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.anonymousAccessDenied(request);
@ -217,11 +222,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
public void testAuthenticationFailedRest() throws Exception {
RestRequest request = mock(RestRequest.class);
InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1");
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
Tuple<RestContent, RestRequest> tuple = prepareRestContent("_uri", new InetSocketAddress(address, 9200));
String expectedMessage = tuple.v1().expectedMessage();
RestRequest request = tuple.v2();
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.authenticationFailed(new MockToken(), request);
@ -243,11 +247,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
public void testAuthenticationFailedRestNoToken() throws Exception {
RestRequest request = mock(RestRequest.class);
InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1");
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
Tuple<RestContent, RestRequest> tuple = prepareRestContent("_uri", new InetSocketAddress(address, 9200));
String expectedMessage = tuple.v1().expectedMessage();
RestRequest request = tuple.v2();
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.authenticationFailed(request);
@ -291,11 +294,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
public void testAuthenticationFailedRealmRest() throws Exception {
RestRequest request = mock(RestRequest.class);
InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1");
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
Tuple<RestContent, RestRequest> tuple = prepareRestContent("_uri", new InetSocketAddress(address, 9200));
String expectedMessage = tuple.v1().expectedMessage();
RestRequest request = tuple.v2();
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.authenticationFailed("_realm", new MockToken(), request);
@ -432,11 +434,10 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
public void testTamperedRequestRest() throws Exception {
RestRequest request = mock(RestRequest.class);
InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1");
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
String expectedMessage = prepareRestContent(request);
Tuple<RestContent, RestRequest> tuple = prepareRestContent("_uri", new InetSocketAddress(address, 9200));
String expectedMessage = tuple.v1().expectedMessage();
RestRequest request = tuple.v2();
Logger logger = CapturingLogger.newCapturingLogger(Level.INFO);
LoggingAuditTrail auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext);
auditTrail.tamperedRequest(request);
@ -598,14 +599,12 @@ public class LoggingAuditTrailTests extends ESTestCase {
}
public void testAuthenticationSuccessRest() throws Exception {
RestRequest request = mock(RestRequest.class);
InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1");
when(request.getRemoteAddress()).thenReturn(new InetSocketAddress(address, 9200));
when(request.uri()).thenReturn("_uri");
Map<String, String> params = new HashMap<>();
params.put("foo", "bar");
when(request.params()).thenReturn(params);
String expectedMessage = prepareRestContent(request);
InetAddress address = forge("_hostname", randomBoolean() ? "127.0.0.1" : "::1");
Tuple<RestContent, RestRequest> tuple = prepareRestContent("_uri", new InetSocketAddress(address, 9200), params);
String expectedMessage = tuple.v1().expectedMessage();
RestRequest request = tuple.v2();
boolean runAs = randomBoolean();
User user;
if (runAs) {
@ -686,13 +685,20 @@ public class LoggingAuditTrailTests extends ESTestCase {
assertThat(CapturingLogger.isEmpty(logger.getName()), is(true));
}
private String prepareRestContent(RestRequest mock) {
private Tuple<RestContent, RestRequest> prepareRestContent(String uri, InetSocketAddress remoteAddress) {
return prepareRestContent(uri, remoteAddress, Collections.emptyMap());
}
private Tuple<RestContent, RestRequest> prepareRestContent(String uri, InetSocketAddress remoteAddress, Map<String, String> params) {
RestContent content = randomFrom(RestContent.values());
when(mock.hasContent()).thenReturn(content.hasContent());
FakeRestRequest.Builder builder = new Builder(NamedXContentRegistry.EMPTY);
if (content.hasContent()) {
when(mock.content()).thenReturn(content.content());
builder.withContent(content.content(), XContentType.JSON);
}
return content.expectedMessage();
builder.withPath(uri);
builder.withRemoteAddress(remoteAddress);
builder.withParams(params);
return new Tuple<>(content, builder.build());
}
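
With the helper above, the tests stop stubbing RestRequest through Mockito and instead build a FakeRestRequest whose content is registered together with its XContentType. A hedged sketch of how a test consumes the helper; the URI and port are placeholders and auditTrail stands for a LoggingAuditTrail constructed as in the tests above:

InetAddress address = forge("_hostname", "127.0.0.1");  // test helper defined in this class
Tuple<RestContent, RestRequest> tuple = prepareRestContent("_uri", new InetSocketAddress(address, 9200));
String expectedMessage = tuple.v1().expectedMessage();
RestRequest request = tuple.v2();                       // carries content plus its content type
auditTrail.anonymousAccessDenied(request);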
/** creates address without any lookups. hostname can be null, for missing */

View File

@ -13,9 +13,9 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.support.MetadataUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.permission.FieldPermissions;
import java.util.Collections;
import java.util.Map;
@ -62,21 +62,21 @@ public class RoleDescriptorTests extends ESTestCase {
Map<String, Object> metadata = randomBoolean() ? MetadataUtils.DEFAULT_RESERVED_METADATA : null;
RoleDescriptor descriptor = new RoleDescriptor("test", new String[] { "all", "none" }, groups, new String[] { "sudo" }, metadata);
XContentBuilder builder = descriptor.toXContent(jsonBuilder(), ToXContent.EMPTY_PARAMS);
RoleDescriptor parsed = RoleDescriptor.parse("test", builder.bytes(), false);
RoleDescriptor parsed = RoleDescriptor.parse("test", builder.bytes(), false, XContentType.JSON);
assertEquals(parsed, descriptor);
}
public void testParse() throws Exception {
String q = "{\"cluster\":[\"a\", \"b\"]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false);
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(0, rd.getIndicesPrivileges().length);
assertArrayEquals(Strings.EMPTY_ARRAY, rd.getRunAs());
q = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(0, rd.getIndicesPrivileges().length);
@ -87,7 +87,7 @@ public class RoleDescriptorTests extends ESTestCase {
"{\"grant\": [\"f1\", \"f2\"]}}, {\"names\": " +
"\"idx2\", " +
"\"privileges\": [\"p3\"], \"field_security\": {\"grant\": [\"f1\", \"f2\"]}, \"query\": \"{\\\"match_all\\\": {}}\"}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(3, rd.getIndicesPrivileges().length);
@ -95,7 +95,7 @@ public class RoleDescriptorTests extends ESTestCase {
q = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], \"privileges\": " +
"[\"p1\", \"p2\"]}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(1, rd.getIndicesPrivileges().length);
@ -104,7 +104,7 @@ public class RoleDescriptorTests extends ESTestCase {
assertNull(rd.getIndicesPrivileges()[0].getQuery());
q = "{\"cluster\":[\"a\", \"b\"], \"metadata\":{\"foo\":\"bar\"}}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(0, rd.getIndicesPrivileges().length);
@ -136,7 +136,7 @@ public class RoleDescriptorTests extends ESTestCase {
public void testParseEmptyQuery() throws Exception {
String json = "{\"cluster\":[\"a\", \"b\"], \"run_as\": [\"m\", \"n\"], \"indices\": [{\"names\": [\"idx1\",\"idx2\"], " +
"\"privileges\": [\"p1\", \"p2\"], \"query\": \"\"}]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false);
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(json), false, XContentType.JSON);
assertEquals("test", rd.getName());
assertArrayEquals(new String[] { "a", "b" }, rd.getClusterPrivileges());
assertEquals(1, rd.getIndicesPrivileges().length);
@ -150,7 +150,7 @@ public class RoleDescriptorTests extends ESTestCase {
Collections.singletonMap("_unlicensed_feature", true), Collections.singletonMap("foo", "bar"));
XContentBuilder b = jsonBuilder();
descriptor.toXContent(b, ToXContent.EMPTY_PARAMS);
RoleDescriptor parsed = RoleDescriptor.parse("test", b.bytes(), false);
RoleDescriptor parsed = RoleDescriptor.parse("test", b.bytes(), false, XContentType.JSON);
assertNotNull(parsed);
assertEquals(1, parsed.getTransientMetadata().size());
assertEquals(true, parsed.getTransientMetadata().get("enabled"));

View File

@ -13,6 +13,7 @@ import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateAction;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Requests;
import org.elasticsearch.index.engine.DocumentMissingException;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.test.SecurityIntegTestCase;
@ -68,33 +69,35 @@ public class WriteActionsTests extends SecurityIntegTestCase {
public void testUpdate() {
createIndex("test1", "index1");
client().prepareIndex("test1", "type", "id").setSource("field", "value").get();
assertEquals(RestStatus.OK, client().prepareUpdate("test1", "type", "id").setDoc("field2", "value2").get().status());
assertEquals(RestStatus.OK, client().prepareUpdate("test1", "type", "id")
.setDoc(Requests.INDEX_CONTENT_TYPE, "field2", "value2").get().status());
assertThrowsAuthorizationExceptionDefaultUsers(client().prepareUpdate("index1", "type", "id").setDoc("field2", "value2")::get,
UpdateAction.NAME);
assertThrowsAuthorizationExceptionDefaultUsers(client().prepareUpdate("index1", "type", "id")
.setDoc(Requests.INDEX_CONTENT_TYPE, "field2", "value2")::get, UpdateAction.NAME);
expectThrows(DocumentMissingException.class, () -> client().prepareUpdate("test4", "type", "id").setDoc("field2", "value2").get());
expectThrows(DocumentMissingException.class, () -> client().prepareUpdate("test4", "type", "id")
.setDoc(Requests.INDEX_CONTENT_TYPE, "field2", "value2").get());
assertThrowsAuthorizationExceptionDefaultUsers(client().prepareUpdate("missing", "type", "id").setDoc("field2", "value2")::get,
UpdateAction.NAME);
assertThrowsAuthorizationExceptionDefaultUsers(client().prepareUpdate("missing", "type", "id")
.setDoc(Requests.INDEX_CONTENT_TYPE, "field2", "value2")::get, UpdateAction.NAME);
}
public void testBulk() {
createIndex("test1", "test2", "test3", "index1");
BulkResponse bulkResponse = client().prepareBulk()
.add(new IndexRequest("test1", "type", "id").source("field", "value"))
.add(new IndexRequest("index1", "type", "id").source("field", "value"))
.add(new IndexRequest("test4", "type", "id").source("field", "value"))
.add(new IndexRequest("missing", "type", "id").source("field", "value"))
.add(new IndexRequest("test1", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new IndexRequest("index1", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new IndexRequest("test4", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new IndexRequest("missing", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new DeleteRequest("test1", "type", "id"))
.add(new DeleteRequest("index1", "type", "id"))
.add(new DeleteRequest("test4", "type", "id"))
.add(new DeleteRequest("missing", "type", "id"))
.add(new IndexRequest("test1", "type", "id").source("field", "value"))
.add(new UpdateRequest("test1", "type", "id").doc("field", "value"))
.add(new UpdateRequest("index1", "type", "id").doc("field", "value"))
.add(new UpdateRequest("test4", "type", "id").doc("field", "value"))
.add(new UpdateRequest("missing", "type", "id").doc("field", "value")).get();
.add(new IndexRequest("test1", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new UpdateRequest("test1", "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new UpdateRequest("index1", "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new UpdateRequest("test4", "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value"))
.add(new UpdateRequest("missing", "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value")).get();
assertTrue(bulkResponse.hasFailures());
assertEquals(13, bulkResponse.getItems().length);
assertFalse(bulkResponse.getItems()[0].isFailed());
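
The recurring change in this test is that key/value request bodies now name their content type up front, via the shared Requests.INDEX_CONTENT_TYPE constant, instead of relying on auto-detection. A minimal sketch of the two call shapes, with illustrative index and field names:

import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.update.UpdateRequest;
import org.elasticsearch.client.Requests;

// the content type is passed ahead of the field/value pairs rather than detected from them
IndexRequest index = new IndexRequest("test1", "type", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value");
UpdateRequest update = new UpdateRequest("test1", "type", "id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value");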


@ -9,6 +9,7 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;
@ -28,7 +29,7 @@ public class FieldPermissionTests extends ESTestCase {
"\"grant\": [\"f1\", \"f2\", \"f3\", \"f4\"]," +
"\"except\": [\"f3\",\"f4\"]" +
"}}]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false);
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2", "f3", "f4" });
assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] { "f3", "f4" });
@ -37,7 +38,7 @@ public class FieldPermissionTests extends ESTestCase {
"\"except\": [\"f3\",\"f4\"]," +
"\"grant\": [\"f1\", \"f2\", \"f3\", \"f4\"]" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2", "f3", "f4" });
assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] { "f3", "f4" });
@ -45,7 +46,7 @@ public class FieldPermissionTests extends ESTestCase {
"\"field_security\": {" +
"\"grant\": [\"f1\", \"f2\"]" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] { "f1", "f2" });
assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
@ -53,7 +54,7 @@ public class FieldPermissionTests extends ESTestCase {
"\"field_security\": {" +
"\"grant\": []" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {});
assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
@ -62,7 +63,7 @@ public class FieldPermissionTests extends ESTestCase {
"\"except\": []," +
"\"grant\": []" +
"}}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {});
assertArrayEquals(rd.getIndicesPrivileges()[0].getDeniedFields(), new String[] {});
@ -70,8 +71,8 @@ public class FieldPermissionTests extends ESTestCase {
"\"field_security\": {" +
"\"except\": [\"f1\"]" +
"}}]}";
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(exceptWithoutGrant), false));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test",
new BytesArray(exceptWithoutGrant), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. field_security requires " +
"grant if except is given"));
@ -79,8 +80,8 @@ public class FieldPermissionTests extends ESTestCase {
"\"field_security\": {" +
"\"grant\": null" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(grantNull), false));
e = expectThrows(ElasticsearchParseException.class,
() -> RoleDescriptor.parse("test", new BytesArray(grantNull), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. grant must not be null."));
final String exceptNull = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
@ -88,8 +89,8 @@ public class FieldPermissionTests extends ESTestCase {
"\"grant\": [\"*\"]," +
"\"except\": null" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(exceptNull), false));
e = expectThrows(ElasticsearchParseException.class,
() -> RoleDescriptor.parse("test", new BytesArray(exceptNull), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. except must not be null."));
final String exceptGrantNull = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
@ -97,15 +98,15 @@ public class FieldPermissionTests extends ESTestCase {
"\"grant\": null," +
"\"except\": null" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(exceptGrantNull), false));
e = expectThrows(ElasticsearchParseException.class,
() -> RoleDescriptor.parse("test", new BytesArray(exceptGrantNull), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. grant must not be null."));
final String bothFieldsMissing = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"field_security\": {" +
"}}]}";
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(bothFieldsMissing), false));
e = expectThrows(ElasticsearchParseException.class,
() -> RoleDescriptor.parse("test", new BytesArray(bothFieldsMissing), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("failed to parse indices privileges for role [test]. \"field_security\" " +
"must not be empty."));
@ -119,7 +120,7 @@ public class FieldPermissionTests extends ESTestCase {
" \"grant\": [\"*\"], \n" +
" \"except\": [\"f2\"]}," +
"\"privileges\": [\"p3\"]}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), false);
rd = RoleDescriptor.parse("test", new BytesArray(q), false, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[] {});
assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
assertArrayEquals(rd.getIndicesPrivileges()[1].getGrantedFields(), new String[] {"*"});
@ -131,36 +132,37 @@ public class FieldPermissionTests extends ESTestCase {
String q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"fields\": [\"f1\", \"f2\"]" +
"}]}";
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), true);
RoleDescriptor rd = RoleDescriptor.parse("test", new BytesArray(q), true, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[]{"f1", "f2"});
assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
final String failingQuery = q;
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(failingQuery), false));
ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class,
() -> RoleDescriptor.parse("test", new BytesArray(failingQuery), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("[\"fields\": [...]] format has changed for field permissions in role [test]" +
", use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"fields\": []" +
"}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), true);
rd = RoleDescriptor.parse("test", new BytesArray(q), true, XContentType.JSON);
assertArrayEquals(rd.getIndicesPrivileges()[0].getGrantedFields(), new String[]{});
assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
final String failingQuery2 = q;
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray
(failingQuery2), false));
e = expectThrows(ElasticsearchParseException.class,
() -> RoleDescriptor.parse("test", new BytesArray(failingQuery2), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("[\"fields\": [...]] format has changed for field permissions in role [test]" +
", use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
q = "{\"indices\": [ {\"names\": \"idx2\", \"privileges\": [\"p3\"], " +
"\"fields\": null" +
"}]}";
rd = RoleDescriptor.parse("test", new BytesArray(q), true);
rd = RoleDescriptor.parse("test", new BytesArray(q), true, XContentType.JSON);
assertNull(rd.getIndicesPrivileges()[0].getGrantedFields());
assertNull(rd.getIndicesPrivileges()[0].getDeniedFields());
final String failingQuery3 = q;
e = expectThrows(ElasticsearchParseException.class, () -> RoleDescriptor.parse("test", new BytesArray(failingQuery3), false));
e = expectThrows(ElasticsearchParseException.class,
() -> RoleDescriptor.parse("test", new BytesArray(failingQuery3), false, XContentType.JSON));
assertThat(e.getDetailedMessage(), containsString("[\"fields\": [...]] format has changed for field permissions in role [test]" +
", use [\"field_security\": {\"grant\":[...],\"except\":[...]}] instead"));
}
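
The pattern across these parsing tests is that RoleDescriptor.parse takes the content type as an explicit final argument rather than sniffing it from the bytes. A minimal sketch of the new call shape, with an illustrative role definition and exception handling omitted:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.security.authz.RoleDescriptor;

// the parser is told the bytes are JSON instead of auto-detecting the format
String json = "{\"cluster\": [\"monitor\"], \"indices\": [{\"names\": [\"idx\"], \"privileges\": [\"read\"]}]}";
RoleDescriptor rd = RoleDescriptor.parse("example_role", new BytesArray(json), false, XContentType.JSON);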


@ -22,7 +22,8 @@ public class RestRequestFilterTests extends ESTestCase {
public void testFilteringItemsInSubLevels() throws IOException {
BytesReference content = new BytesArray("{\"root\": {\"second\": {\"third\": \"password\", \"foo\": \"bar\"}}}");
RestRequestFilter filter = () -> Collections.singleton("root.second.third");
FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(content).build();
FakeRestRequest restRequest =
new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(content, XContentType.JSON).build();
RestRequest filtered = filter.getFilteredRequest(restRequest);
assertNotEquals(content, filtered.content());
@ -38,7 +39,8 @@ public class RestRequestFilterTests extends ESTestCase {
public void testFilteringItemsInSubLevelsWithWildCard() throws IOException {
BytesReference content = new BytesArray("{\"root\": {\"second\": {\"third\": \"password\", \"foo\": \"bar\"}}}");
RestRequestFilter filter = () -> Collections.singleton("root.*.third");
FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(content).build();
FakeRestRequest restRequest =
new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(content, XContentType.JSON).build();
RestRequest filtered = filter.getFilteredRequest(restRequest);
assertNotEquals(content, filtered.content());
@ -54,7 +56,8 @@ public class RestRequestFilterTests extends ESTestCase {
public void testFilteringItemsInSubLevelsWithLeadingWildCard() throws IOException {
BytesReference content = new BytesArray("{\"root\": {\"second\": {\"third\": \"password\", \"foo\": \"bar\"}}}");
RestRequestFilter filter = () -> Collections.singleton("*.third");
FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(content).build();
FakeRestRequest restRequest =
new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent(content, XContentType.JSON).build();
RestRequest filtered = filter.getFilteredRequest(restRequest);
assertNotEquals(content, filtered.content());
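
These filter tests all build their fake requests the same way: FakeRestRequest.Builder#withContent now takes the content type alongside the body bytes. A minimal sketch with an illustrative JSON body:

import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.rest.FakeRestRequest;

// the body's content type is declared on the builder rather than detected later
FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY)
        .withContent(new BytesArray("{\"root\": {\"second\": \"value\"}}"), XContentType.JSON)
        .build();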


@ -40,7 +40,6 @@ import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;
@ -114,7 +113,7 @@ public class SecurityRestFilterTests extends ESTestCase {
public void testProcessFiltersBodyCorrectly() throws Exception {
FakeRestRequest restRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY)
.withContent(new BytesArray("{\"password\": \"changeme\", \"foo\": \"bar\"}")).build();
.withContent(new BytesArray("{\"password\": \"changeme\", \"foo\": \"bar\"}"), XContentType.JSON).build();
when(channel.request()).thenReturn(restRequest);
SetOnce<RestRequest> handlerRequest = new SetOnce<>();
restHandler = new FilteredRestHandler() {


@ -147,6 +147,7 @@ public class WebhookIntegrationTests extends AbstractWatcherIntegrationTestCase
.path(new TextTemplate("/%3Clogstash-%7Bnow%2Fd%7D%3E/log/1"))
.body(new TextTemplate("{\"foo\":\"bar\"}"))
.auth(new BasicAuth("test", "changeme".toCharArray()))
.putHeader("Content-Type", new TextTemplate("application/json"))
.method(HttpMethod.PUT);
watcherClient().preparePutWatch("_id")


@ -768,7 +768,7 @@ public class ExecutionServiceTests extends ESTestCase {
when(getResponse.isExists()).thenReturn(true);
when(getResponse.getId()).thenReturn("foo");
when(client.getWatch(any())).thenReturn(getResponse);
when(parser.parseWithSecrets(eq("foo"), eq(true), any(), any())).thenReturn(watch);
when(parser.parseWithSecrets(eq("foo"), eq(true), any(), any(), any())).thenReturn(watch);
// execute needs to fail as well as storing the history
doThrow(new EsRejectedExecutionException()).when(executor).execute(any());


@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.input.chain;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.transport.Netty4Plugin;
@ -54,7 +55,7 @@ public class ChainIntegrationTests extends AbstractWatcherIntegrationTestCase {
public void testChainedInputsAreWorking() throws Exception {
String index = "the-most-awesome-index-ever";
createIndex(index);
client().prepareIndex(index, "type", "id").setSource("{}").setRefreshPolicy(IMMEDIATE).get();
client().prepareIndex(index, "type", "id").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get();
InetSocketAddress address = internalCluster().httpAddresses()[0];
HttpInput.Builder httpInputBuilder = httpInput(HttpRequestTemplate.builder(address.getHostString(), address.getPort())


@ -10,6 +10,7 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.transport.Netty4Plugin;
import org.elasticsearch.xpack.common.http.HttpRequestTemplate;
@ -58,7 +59,7 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas
public void testHttpInput() throws Exception {
createIndex("index");
client().prepareIndex("index", "type", "id").setSource("{}").setRefreshPolicy(IMMEDIATE).get();
client().prepareIndex("index", "type", "id").setSource("{}", XContentType.JSON).setRefreshPolicy(IMMEDIATE).get();
InetSocketAddress address = internalCluster().httpAddresses()[0];
watcherClient().preparePutWatch("_name")
@ -67,6 +68,7 @@ public class HttpInputIntegrationTests extends AbstractWatcherIntegrationTestCas
.input(httpInput(HttpRequestTemplate.builder(address.getHostString(), address.getPort())
.path("/index/_search")
.body(jsonBuilder().startObject().field("size", 1).endObject().string())
.putHeader("Content-Type", new TextTemplate("application/json"))
.auth(securityEnabled() ? new BasicAuth("test", "changeme".toCharArray()) : null)))
.condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L))
.addAction("_id", loggingAction("anything")))


@ -7,15 +7,19 @@ package org.elasticsearch.xpack.watcher.rest.action;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.rest.FakeRestRequest;
import org.elasticsearch.test.rest.FakeRestRequest.Builder;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchRequestBuilder;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.core.Is.is;
import static org.mockito.Mockito.mock;
@ -49,24 +53,16 @@ public class RestExecuteWatchActionTests extends ESTestCase {
}
private FakeRestRequest createFakeRestRequest(String randomId, String recordExecution, String ignoreCondition, String debugCondition) {
FakeRestRequest restRequest = new FakeRestRequest() {
@Override
public boolean hasContent() {
return true;
}
@Override
public BytesReference content() {
return new BytesArray("{}");
}
};
restRequest.params().put("id", randomId);
FakeRestRequest.Builder builder = new Builder(NamedXContentRegistry.EMPTY);
builder.withContent(new BytesArray("{}"), XContentType.JSON);
Map<String, String> params = new HashMap<>();
params.put("id", randomId);
// make sure we test true/false/no params
if (recordExecution != null) restRequest.params().put("record_execution", recordExecution);
if (ignoreCondition != null) restRequest.params().put("ignore_condition", ignoreCondition);
if (debugCondition != null) restRequest.params().put("debug", debugCondition);
if (recordExecution != null) params.put("record_execution", recordExecution);
if (ignoreCondition != null) params.put("ignore_condition", ignoreCondition);
if (debugCondition != null) params.put("debug", debugCondition);
return restRequest;
builder.withParams(params);
return builder.build();
}
}


@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.test.bench;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.node.MockNode;
import org.elasticsearch.node.Node;
import org.elasticsearch.script.Script;
@ -72,7 +73,7 @@ public class WatcherExecutorServiceBenchmark {
public static void main(String[] args) throws Exception {
start();
client.admin().indices().prepareCreate("test").get();
client.prepareIndex("test", "test", "1").setSource("{}").get();
client.prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get();
int numAlerts = 1000;
for (int i = 0; i < numAlerts; i++) {


@ -101,7 +101,7 @@ public class WatcherScheduleEngineBenchmark {
client.admin().indices().prepareDelete("_all").get();
client.admin().indices().prepareCreate("test").get();
client.prepareIndex("test", "test", "1").setSource("{}").get();
client.prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get();
System.out.println("===============> indexing [" + numWatches + "] watches");
for (int i = 0; i < numWatches; i++) {
@ -112,7 +112,7 @@ public class WatcherScheduleEngineBenchmark {
.input(searchInput(templateRequest(new SearchSourceBuilder(), "test")))
.condition(new ScriptCondition(new Script("ctx.payload.hits.total > 0")))
.addAction("logging", ActionBuilders.loggingAction("test").setLevel(LoggingLevel.TRACE))
.buildAsBytes(XContentType.JSON)
.buildAsBytes(XContentType.JSON), XContentType.JSON
).get();
}
client.admin().indices().prepareFlush(Watch.INDEX, "test").get();


@ -10,6 +10,7 @@ import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchType;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.builder.SearchSourceBuilder;
@ -263,7 +264,7 @@ public class BasicWatcherTests extends AbstractWatcherIntegrationTestCase {
assertAcked(client().admin().cluster().preparePutStoredScript()
.setLang("mustache")
.setId("my-template")
.setContent(jsonBuilder().startObject().field("template").value(searchSourceBuilder).endObject().bytes())
.setContent(jsonBuilder().startObject().field("template").value(searchSourceBuilder).endObject().bytes(), XContentType.JSON)
.get());
Script template = new Script(ScriptType.STORED, "mustache", "my-template", Collections.emptyMap());


@ -175,7 +175,7 @@ public class BootStrapTests extends AbstractWatcherIntegrationTestCase {
.trigger(schedule(cron("0 0/5 * * * ? 2050")))
.input(searchInput(request))
.condition(new CompareCondition("ctx.payload.hits.total", CompareCondition.Op.EQ, 1L))
.buildAsBytes(XContentType.JSON)
.buildAsBytes(XContentType.JSON), XContentType.JSON
)
.setWaitForActiveShards(ActiveShardCount.ALL)
.get();


@ -13,6 +13,7 @@ import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.index.IndexResponse;
import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.xpack.watcher.actions.ActionStatus;
import org.elasticsearch.xpack.watcher.client.WatcherClient;
import org.elasticsearch.xpack.watcher.condition.CompareCondition;
@ -120,7 +121,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
GetWatchResponse getWatchResponse = watcherClient.prepareGetWatch("_id").get();
assertThat(getWatchResponse.isFound(), is(true));
Watch parsedWatch = watchParser().parse(getWatchResponse.getId(), true, getWatchResponse.getSource().getBytes());
Watch parsedWatch = watchParser().parse(getWatchResponse.getId(), true, getWatchResponse.getSource().getBytes(), XContentType.JSON);
assertThat(parsedWatch.status().actionStatus("_a1").ackStatus().state(),
is(ActionStatus.AckStatus.State.AWAITS_SUCCESSFUL_EXECUTION));
assertThat(parsedWatch.status().actionStatus("_a2").ackStatus().state(),
@ -187,7 +188,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
GetWatchResponse getWatchResponse = watcherClient.prepareGetWatch("_id").get();
assertThat(getWatchResponse.isFound(), is(true));
Watch parsedWatch = watchParser().parse(getWatchResponse.getId(), true, getWatchResponse.getSource().getBytes());
Watch parsedWatch = watchParser().parse(getWatchResponse.getId(), true, getWatchResponse.getSource().getBytes(), XContentType.JSON);
assertThat(parsedWatch.status().actionStatus("_a1").ackStatus().state(),
is(ActionStatus.AckStatus.State.AWAITS_SUCCESSFUL_EXECUTION));
assertThat(parsedWatch.status().actionStatus("_a2").ackStatus().state(),
@ -227,7 +228,7 @@ public class WatchAckTests extends AbstractWatcherIntegrationTestCase {
refresh();
GetResponse getResponse = client().get(new GetRequest(Watch.INDEX, Watch.DOC_TYPE, "_name")).actionGet();
Watch indexedWatch = watchParser().parse("_name", true, getResponse.getSourceAsBytesRef());
Watch indexedWatch = watchParser().parse("_name", true, getResponse.getSourceAsBytesRef(), XContentType.JSON);
assertThat(watchResponse.getStatus().actionStatus("_id").ackStatus().state(),
equalTo(indexedWatch.status().actionStatus("_id").ackStatus().state()));


@ -10,6 +10,7 @@ import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.env.Environment;
import org.elasticsearch.plugins.Plugin;
@ -127,7 +128,7 @@ public class TransformIntegrationTests extends AbstractWatcherIntegrationTestCas
assertAcked(client().admin().cluster().preparePutStoredScript()
.setId("my-script")
.setLang("painless")
.setContent(new BytesArray("{\"script\" : \"['key3' : ctx.payload.key1 + ctx.payload.key2]\"}"))
.setContent(new BytesArray("{\"script\" : \"['key3' : ctx.payload.key1 + ctx.payload.key2]\"}"), XContentType.JSON)
.get());
script = new Script(ScriptType.STORED, "painless", "my-script", Collections.emptyMap());
} else {


@ -8,6 +8,7 @@ package org.elasticsearch.xpack.watcher.transport.action;
import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.watcher.execution.ActionExecutionMode;
import org.elasticsearch.xpack.watcher.transport.actions.ack.AckWatchRequest;
@ -120,7 +121,7 @@ public class WatchRequestValidationTests extends ESTestCase {
public void testExecuteWatchWatchIdAndSource() {
ExecuteWatchRequest request = new ExecuteWatchRequest("foo");
request.setWatchSource(BytesArray.EMPTY);
request.setWatchSource(BytesArray.EMPTY, XContentType.JSON);
ActionRequestValidationException e = request.validate();
assertThat(e, is(notNullValue()));
assertThat(e.validationErrors(),
@ -129,7 +130,7 @@ public class WatchRequestValidationTests extends ESTestCase {
public void testExecuteWatchSourceAndRecordExecution() {
ExecuteWatchRequest request = new ExecuteWatchRequest();
request.setWatchSource(BytesArray.EMPTY);
request.setWatchSource(BytesArray.EMPTY, XContentType.JSON);
request.setRecordExecution(true);
ActionRequestValidationException e = request.validate();
assertThat(e, is(notNullValue()));


@ -139,7 +139,7 @@ public class ActivateWatchTests extends AbstractWatcherIntegrationTestCase {
// now that we filtered out the watch status state, lets put it back in
IndexResponse indexResponse = client().prepareIndex(".watches", "watch", "_id")
.setSource(builder.bytes())
.setSource(builder.bytes(), XContentType.JSON)
.get();
assertThat(indexResponse.getId(), is("_id"));


@ -0,0 +1,54 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
package org.elasticsearch.xpack.watcher.transport.action.execute;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.execute.ExecuteWatchRequest;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
public class ExecuteWatchRequestTests extends ESTestCase {
public void testSerialization() throws IOException {
ExecuteWatchRequest request = new ExecuteWatchRequest("1");
request.setWatchSource(new BytesArray("{}".getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
assertEquals(XContentType.JSON, request.getXContentType());
BytesStreamOutput out = new BytesStreamOutput();
request.writeTo(out);
StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes);
ExecuteWatchRequest serialized = new ExecuteWatchRequest();
serialized.readFrom(in);
assertEquals(XContentType.JSON, serialized.getXContentType());
assertEquals("{}", serialized.getWatchSource().utf8ToString());
}
public void testSerializationBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAAAAAAAAAAAAAAAAAAABDnsid2F0Y2giOiJtZSJ9AAAAAAAAAA==");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
ExecuteWatchRequest request = new ExecuteWatchRequest();
request.readFrom(in);
assertEquals(XContentType.JSON, request.getXContentType());
assertEquals("{\"watch\":\"me\"}", request.getWatchSource().utf8ToString());
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(version);
request.writeTo(out);
assertArrayEquals(data, out.bytes().toBytesRef().bytes);
}
}
}
}


@ -5,11 +5,18 @@
*/
package org.elasticsearch.xpack.watcher.transport.action.put;
import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.watcher.transport.actions.put.PutWatchRequest;
import java.io.IOException;
import java.util.Base64;
import static org.hamcrest.Matchers.is;
public class PutWatchSerializationTests extends ESTestCase {
@ -19,7 +26,9 @@ public class PutWatchSerializationTests extends ESTestCase {
PutWatchRequest request = new PutWatchRequest();
request.setId(randomAsciiOfLength(10));
request.setActive(randomBoolean());
request.setSource(new BytesArray(randomAsciiOfLength(20)));
request.setSource(
new BytesArray(JsonXContent.contentBuilder().startObject().field("foo", randomAsciiOfLength(20)).endObject().string()),
XContentType.JSON);
BytesStreamOutput streamOutput = new BytesStreamOutput();
request.writeTo(streamOutput);
@ -29,6 +38,48 @@ public class PutWatchSerializationTests extends ESTestCase {
assertThat(readRequest.isActive(), is(request.isActive()));
assertThat(readRequest.getId(), is(request.getId()));
assertThat(readRequest.getSource(), is(request.getSource()));
assertThat(readRequest.xContentType(), is(request.xContentType()));
}
public void testPutWatchSerializationXContent() throws Exception {
PutWatchRequest request = new PutWatchRequest();
request.setId(randomAsciiOfLength(10));
request.setActive(randomBoolean());
request.setSource(
new BytesArray(JsonXContent.contentBuilder().startObject().field("foo", randomAsciiOfLength(20)).endObject().string()),
XContentType.JSON);
assertEquals(XContentType.JSON, request.xContentType());
BytesStreamOutput streamOutput = new BytesStreamOutput();
request.writeTo(streamOutput);
PutWatchRequest readRequest = new PutWatchRequest();
StreamInput input = streamOutput.bytes().streamInput();
readRequest.readFrom(input);
assertThat(readRequest.isActive(), is(request.isActive()));
assertThat(readRequest.getId(), is(request.getId()));
assertThat(readRequest.getSource(), is(request.getSource()));
assertThat(readRequest.xContentType(), is(XContentType.JSON));
}
public void testPutWatchSerializationXContentBwc() throws IOException {
final byte[] data = Base64.getDecoder().decode("ADwDAmlkDXsiZm9vIjoiYmFyIn0BAAAA");
final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2,
Version.V_5_0_3_UNRELEASED, Version.V_5_1_1_UNRELEASED, Version.V_5_1_2_UNRELEASED, Version.V_5_2_0_UNRELEASED);
try (StreamInput in = StreamInput.wrap(data)) {
in.setVersion(version);
PutWatchRequest request = new PutWatchRequest();
request.readFrom(in);
assertEquals(XContentType.JSON, request.xContentType());
assertEquals("id", request.getId());
assertTrue(request.isActive());
assertEquals("{\"foo\":\"bar\"}", request.getSource().utf8ToString());
try (BytesStreamOutput out = new BytesStreamOutput()) {
out.setVersion(version);
request.writeTo(out);
assertArrayEquals(data, out.bytes().toBytesRef().bytes);
}
}
}
}


@ -16,6 +16,7 @@ import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.MatchAllQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryParseContext;
@ -204,7 +205,7 @@ public class WatchTests extends ESTestCase {
logger.info("{}", bytes.utf8ToString());
Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, clock);
Watch parsedWatch = watchParser.parse("_name", includeStatus, bytes);
Watch parsedWatch = watchParser.parse("_name", includeStatus, bytes, XContentType.JSON);
if (includeStatus) {
assertThat(parsedWatch.status(), equalTo(watchStatus));
@ -240,7 +241,7 @@ public class WatchTests extends ESTestCase {
.endObject();
Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, clock);
try {
watchParser.parse("failure", false, jsonBuilder.bytes());
watchParser.parse("failure", false, jsonBuilder.bytes(), XContentType.JSON);
fail("This watch should fail to parse as actions is an array");
} catch (ElasticsearchParseException pe) {
assertThat(pe.getMessage().contains("could not parse actions for watch [failure]"), is(true));
@ -265,7 +266,7 @@ public class WatchTests extends ESTestCase {
.endObject();
builder.endObject();
Watch.Parser watchParser = new Watch.Parser(settings, triggerService, actionRegistry, inputRegistry, null, Clock.systemUTC());
Watch watch = watchParser.parse("failure", false, builder.bytes());
Watch watch = watchParser.parse("failure", false, builder.bytes(), XContentType.JSON);
assertThat(watch, notNullValue());
assertThat(watch.trigger(), instanceOf(ScheduleTrigger.class));
assertThat(watch.input(), instanceOf(ExecutableNoneInput.class));
@ -331,7 +332,7 @@ public class WatchTests extends ESTestCase {
builder.endObject();
// parse in default mode:
Watch watch = watchParser.parse("_id", false, builder.bytes());
Watch watch = watchParser.parse("_id", false, builder.bytes(), XContentType.JSON);
assertThat(((ScriptCondition) watch.condition()).getScript().getLang(), equalTo(Script.DEFAULT_SCRIPT_LANG));
WatcherSearchTemplateRequest request = ((SearchInput) watch.input().input()).getRequest();
SearchRequest searchRequest = searchTemplateService.toSearchRequest(request);


@ -5,6 +5,7 @@
*/
package org.elasticsearch.license.licensor.tools;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
@ -15,6 +16,7 @@ import org.elasticsearch.cli.ExitCodes;
import org.elasticsearch.cli.UserException;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
@ -69,13 +71,14 @@ public class LicenseGeneratorTool extends Command {
final License licenseSpec;
if (options.has(licenseOption)) {
licenseSpec = License.fromSource(licenseOption.value(options));
licenseSpec =
License.fromSource(new BytesArray(licenseOption.value(options).getBytes(StandardCharsets.UTF_8)), XContentType.JSON);
} else if (options.has(licenseFileOption)) {
Path licenseSpecPath = parsePath(licenseFileOption.value(options));
if (Files.exists(licenseSpecPath) == false) {
throw new UserException(ExitCodes.USAGE, licenseSpecPath + " does not exist");
}
licenseSpec = License.fromSource(Files.readAllBytes(licenseSpecPath));
licenseSpec = License.fromSource(new BytesArray(Files.readAllBytes(licenseSpecPath)), XContentType.JSON);
} else {
throw new UserException(ExitCodes.USAGE, "Must specify either --license or --licenseFile");
}
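
As in the tool above, License.fromSource now takes a BytesReference plus an explicit content type instead of a raw string or byte array. A minimal sketch for a JSON license spec read from disk (path is illustrative, IOException handling omitted):

import java.nio.file.Files;
import java.nio.file.Paths;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.license.License;

// the caller states that the spec is JSON rather than having the parser guess
byte[] licenseBytes = Files.readAllBytes(Paths.get("license-spec.json"));
License licenseSpec = License.fromSource(new BytesArray(licenseBytes), XContentType.JSON);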

Some files were not shown because too many files have changed in this diff.