Decouple XContentBuilder from BytesReference

* Decouple XContentBuilder from BytesReference

This commit removes all mentions of `BytesReference` from `XContentBuilder`.
This is needed so that we can completely decouple the XContent code and move it
into its own dependency.

While this change appears large, it boils down to two main changes: moving
`.bytes()` and `.string()` out of `XContentBuilder` itself into the static methods
`BytesReference.bytes` and `Strings.toString` respectively. The rest of the
change is code reacting to these two moves (the majority of it in tests).

Relates to the broader XContent-decoupling effort (the original issue/PR reference was lost when this page was extracted — TODO: restore the issue number).
This commit is contained in:
Lee Hinman 2018-03-14 13:47:57 -06:00 committed by GitHub
parent ef6fc1e9fd
commit 8e8fdc4f0e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
301 changed files with 3563 additions and 3369 deletions
client/rest-high-level/src
main/java/org/elasticsearch/client
test/java/org/elasticsearch/client
modules
ingest-common/src
main/java/org/elasticsearch/ingest/common
test/java/org/elasticsearch/ingest/common
lang-mustache/src
mapper-extras/src/test/java/org/elasticsearch/index/mapper
parent-join/src/test/java/org/elasticsearch/join
percolator/src
rank-eval/src/test/java/org/elasticsearch/index/rankeval
reindex/src
transport-netty4/src/test/java/org/elasticsearch/rest
plugins
analysis-icu/src/test/java/org/elasticsearch/index/mapper
mapper-murmur3/src/test/java/org/elasticsearch/index/mapper/murmur3
mapper-size/src/test/java/org/elasticsearch/index/mapper/size
repository-gcs/src/test/java/org/elasticsearch/repositories/gcs
qa
ccs-unavailable-clusters/src/test/java/org/elasticsearch/search
full-cluster-restart/src/test/java/org/elasticsearch/upgrades
mixed-cluster/src/test/java/org/elasticsearch/backwards
query-builder-bwc/src/test/java/org/elasticsearch/bwc
smoke-test-http/src/test/java/org/elasticsearch/http
wildfly/src/test/java/org/elasticsearch/wildfly
server/src/main/java/org/elasticsearch

@ -328,7 +328,7 @@ public final class Request {
} }
metadata.endObject(); metadata.endObject();
BytesRef metadataSource = metadata.bytes().toBytesRef(); BytesRef metadataSource = BytesReference.bytes(metadata).toBytesRef();
content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length); content.write(metadataSource.bytes, metadataSource.offset, metadataSource.length);
content.write(separator); content.write(separator);
} }
@ -343,7 +343,7 @@ public final class Request {
LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) { LoggingDeprecationHandler.INSTANCE, indexSource, indexXContentType)) {
try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) { try (XContentBuilder builder = XContentBuilder.builder(bulkContentType.xContent())) {
builder.copyCurrentStructure(parser); builder.copyCurrentStructure(parser);
source = builder.bytes().toBytesRef(); source = BytesReference.bytes(builder).toBytesRef();
} }
} }
} else if (opType == DocWriteRequest.OpType.UPDATE) { } else if (opType == DocWriteRequest.OpType.UPDATE) {

@ -617,7 +617,8 @@ public class CrudIT extends ESRestHighLevelClientTestCase {
bulkRequest.add(deleteRequest); bulkRequest.add(deleteRequest);
} else { } else {
BytesReference source = XContentBuilder.builder(xContentType.xContent()).startObject().field("id", i).endObject().bytes(); BytesReference source = BytesReference.bytes(XContentBuilder.builder(xContentType.xContent())
.startObject().field("id", i).endObject());
if (opType == DocWriteRequest.OpType.INDEX) { if (opType == DocWriteRequest.OpType.INDEX) {
IndexRequest indexRequest = new IndexRequest("index", "test", id).source(source, xContentType); IndexRequest indexRequest = new IndexRequest("index", "test", id).source(source, xContentType);
if (erroneous) { if (erroneous) {

@ -55,6 +55,7 @@ import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -272,7 +273,7 @@ public class RestHighLevelClientTests extends ESTestCase {
builder.startObject(); builder.startObject();
builder.field("field", "value"); builder.field("field", "value");
builder.endObject(); builder.endObject();
return new ByteArrayEntity(builder.bytes().toBytesRef().bytes, contentType); return new ByteArrayEntity(BytesReference.bytes(builder).toBytesRef().bytes, contentType);
} }
} }

@ -34,6 +34,7 @@ import org.elasticsearch.action.search.MultiSearchResponse;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.MatchQueryBuilder;
@ -478,7 +479,7 @@ public class SearchIT extends ESRestHighLevelClientTestCase {
for (int i = 0; i < 100; i++) { for (int i = 0; i < 100; i++) {
XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject();
HttpEntity entity = new NStringEntity(builder.string(), ContentType.APPLICATION_JSON); HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity); client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity);
} }
client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh"); client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh");

@ -266,13 +266,13 @@ public class CRUDDocumentationIT extends ESRestHighLevelClientTestCase {
assertSame(indexResponse.status(), RestStatus.CREATED); assertSame(indexResponse.status(), RestStatus.CREATED);
XContentType xContentType = XContentType.JSON; XContentType xContentType = XContentType.JSON;
String script = XContentBuilder.builder(xContentType.xContent()) String script = Strings.toString(XContentBuilder.builder(xContentType.xContent())
.startObject() .startObject()
.startObject("script") .startObject("script")
.field("lang", "painless") .field("lang", "painless")
.field("code", "ctx._source.field += params.count") .field("code", "ctx._source.field += params.count")
.endObject() .endObject()
.endObject().string(); .endObject());
HttpEntity body = new NStringEntity(script, ContentType.create(xContentType.mediaType())); HttpEntity body = new NStringEntity(script, ContentType.create(xContentType.mediaType()));
Response response = client().performRequest(HttpPost.METHOD_NAME, "/_scripts/increment-field", emptyMap(), body); Response response = client().performRequest(HttpPost.METHOD_NAME, "/_scripts/increment-field", emptyMap(), body);
assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus()); assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus());

@ -33,6 +33,7 @@ import org.elasticsearch.client.ESRestHighLevelClientTestCase;
import org.elasticsearch.client.Response; import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
@ -75,7 +76,7 @@ public class MigrationDocumentationIT extends ESRestHighLevelClientTestCase {
.put(SETTING_NUMBER_OF_REPLICAS, 0) .put(SETTING_NUMBER_OF_REPLICAS, 0)
.build(); .build();
String payload = XContentFactory.jsonBuilder() // <2> String payload = Strings.toString(XContentFactory.jsonBuilder() // <2>
.startObject() .startObject()
.startObject("settings") // <3> .startObject("settings") // <3>
.value(indexSettings) .value(indexSettings)
@ -89,7 +90,7 @@ public class MigrationDocumentationIT extends ESRestHighLevelClientTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); // <5> HttpEntity entity = new NStringEntity(payload, ContentType.APPLICATION_JSON); // <5>

@ -21,13 +21,13 @@ package org.elasticsearch.ingest.common;
import com.fasterxml.jackson.core.JsonFactory; import com.fasterxml.jackson.core.JsonFactory;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.common.xcontent.json.JsonXContentParser;
import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.AbstractProcessor;
import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.Processor;
@ -99,7 +99,7 @@ public final class ScriptProcessor extends AbstractProcessor {
public ScriptProcessor create(Map<String, Processor.Factory> registry, String processorTag, public ScriptProcessor create(Map<String, Processor.Factory> registry, String processorTag,
Map<String, Object> config) throws Exception { Map<String, Object> config) throws Exception {
try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config); try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config);
InputStream stream = builder.bytes().streamInput(); InputStream stream = BytesReference.bytes(builder).streamInput();
XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY, XContentParser parser = XContentType.JSON.xContent().createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) { LoggingDeprecationHandler.INSTANCE, stream)) {
Script script = Script.parse(parser); Script script = Script.parse(parser);

@ -19,6 +19,7 @@
package org.elasticsearch.ingest.common; package org.elasticsearch.ingest.common;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
@ -63,7 +64,7 @@ public class GrokProcessorGetActionTests extends ESTestCase {
GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(TEST_PATTERNS); GrokProcessorGetAction.Response response = new GrokProcessorGetAction.Response(TEST_PATTERNS);
try (XContentBuilder builder = JsonXContent.contentBuilder()) { try (XContentBuilder builder = JsonXContent.contentBuilder()) {
response.toXContent(builder, ToXContent.EMPTY_PARAMS); response.toXContent(builder, ToXContent.EMPTY_PARAMS);
Map<String, Object> converted = XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2(); Map<String, Object> converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2();
Map<String, String> patterns = (Map<String, String>) converted.get("patterns"); Map<String, String> patterns = (Map<String, String>) converted.get("patterns");
assertThat(patterns.size(), equalTo(1)); assertThat(patterns.size(), equalTo(1));
assertThat(patterns.get("PATTERN"), equalTo("foo")); assertThat(patterns.get("PATTERN"), equalTo("foo"));

@ -19,6 +19,7 @@
package org.elasticsearch.ingest.common; package org.elasticsearch.ingest.common;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
@ -48,7 +49,7 @@ public class JsonProcessorTests extends ESTestCase {
Map<String, Object> randomJsonMap = RandomDocumentPicks.randomSource(random()); Map<String, Object> randomJsonMap = RandomDocumentPicks.randomSource(random());
XContentBuilder builder = JsonXContent.contentBuilder().map(randomJsonMap); XContentBuilder builder = JsonXContent.contentBuilder().map(randomJsonMap);
String randomJson = XContentHelper.convertToJson(builder.bytes(), false, XContentType.JSON); String randomJson = XContentHelper.convertToJson(BytesReference.bytes(builder), false, XContentType.JSON);
document.put(randomField, randomJson); document.put(randomField, randomJson);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);

@ -30,6 +30,7 @@ import com.github.mustachejava.TemplateContext;
import com.github.mustachejava.codes.DefaultMustache; import com.github.mustachejava.codes.DefaultMustache;
import com.github.mustachejava.codes.IterableCode; import com.github.mustachejava.codes.IterableCode;
import com.github.mustachejava.codes.WriteCode; import com.github.mustachejava.codes.WriteCode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
@ -215,7 +216,7 @@ public class CustomMustacheFactory extends DefaultMustacheFactory {
// Do not handle as JSON // Do not handle as JSON
return oh.stringify(resolved); return oh.stringify(resolved);
} }
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
throw new MustacheException("Failed to convert object to JSON", e); throw new MustacheException("Failed to convert object to JSON", e);
} }

@ -23,6 +23,7 @@ import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.ParsingException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -63,7 +64,7 @@ public class RestSearchTemplateAction extends BaseRestHandler {
if (parser.currentToken() == XContentParser.Token.START_OBJECT) { if (parser.currentToken() == XContentParser.Token.START_OBJECT) {
//convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder) //convert the template to json which is the only supported XContentType (see CustomMustacheFactory#createEncoder)
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
request.setScript(builder.copyCurrentStructure(parser).string()); request.setScript(Strings.toString(builder.copyCurrentStructure(parser)));
} catch (IOException e) { } catch (IOException e) {
throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e); throw new ParsingException(parser.getTokenLocation(), "Could not parse inline template", e);
} }

@ -30,6 +30,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestStatus;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject { public class SearchTemplateResponse extends ActionResponse implements StatusToXContentObject {
@ -83,7 +84,9 @@ public class SearchTemplateResponse extends ActionResponse implements StatusToX
} else { } else {
builder.startObject(); builder.startObject();
//we can assume the template is always json as we convert it before compiling it //we can assume the template is always json as we convert it before compiling it
builder.rawField("template_output", source, XContentType.JSON); try (InputStream stream = source.streamInput()) {
builder.rawField("template_output", stream, XContentType.JSON);
}
builder.endObject(); builder.endObject();
} }
return builder; return builder;

@ -21,6 +21,7 @@ package org.elasticsearch.script.mustache;
import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.ScriptType;
@ -61,13 +62,13 @@ public class MultiSearchTemplateIT extends ESIntegTestCase {
} }
indexRandom(true, indexRequestBuilders); indexRandom(true, indexRequestBuilders);
final String template = jsonBuilder().startObject() final String template = Strings.toString(jsonBuilder().startObject()
.startObject("query") .startObject("query")
.startObject("{{query_type}}") .startObject("{{query_type}}")
.field("{{field_name}}", "{{field_value}}") .field("{{field_name}}", "{{field_value}}")
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest(); MultiSearchTemplateRequest multiRequest = new MultiSearchTemplateRequest();

@ -30,6 +30,7 @@ import java.util.Map;
import java.util.Set; import java.util.Set;
import com.github.mustachejava.MustacheException; import com.github.mustachejava.MustacheException;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptEngine;
@ -248,7 +249,7 @@ public class MustacheTests extends ESTestCase {
.endObject(); .endObject();
Map<String, Object> ctx = Map<String, Object> ctx =
singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2()); singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2());
assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx, assertScript("{{#ctx.bulks}}{{#toJson}}.{{/toJson}}{{/ctx.bulks}}", ctx,
equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}")); equalTo("{\"index\":\"index-1\",\"id\":1,\"type\":\"type-1\"}{\"index\":\"index-2\",\"id\":2,\"type\":\"type-2\"}"));
@ -290,7 +291,7 @@ public class MustacheTests extends ESTestCase {
.endObject(); .endObject();
Map<String, Object> ctx = Map<String, Object> ctx =
singletonMap("ctx", XContentHelper.convertToMap(builder.bytes(), false, builder.contentType()).v2()); singletonMap("ctx", XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2());
assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx, assertScript("{{#join}}ctx.people.0.emails{{/join}}", ctx,
equalTo("john@smith.com,john.smith@email.com,jsmith@email.com")); equalTo("john@smith.com,john.smith@email.com,jsmith@email.com"));

@ -23,6 +23,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRespo
import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkRequestBuilder;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
@ -317,7 +318,8 @@ public class SearchTemplateIT extends ESSingleNodeTestCase {
assertAcked( assertAcked(
client().admin().cluster().preparePutStoredScript() client().admin().cluster().preparePutStoredScript()
.setId("4") .setId("4")
.setContent(jsonBuilder().startObject().field("template", multiQuery).endObject().bytes(), XContentType.JSON) .setContent(BytesReference.bytes(jsonBuilder().startObject().field("template", multiQuery).endObject()),
XContentType.JSON)
); );
BulkRequestBuilder bulkRequestBuilder = client().prepareBulk(); BulkRequestBuilder bulkRequestBuilder = client().prepareBulk();
bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON)); bulkRequestBuilder.add(client().prepareIndex("test", "type", "1").setSource("{\"theField\":\"foo\"}", XContentType.JSON));

@ -21,6 +21,8 @@ package org.elasticsearch.index.mapper;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
@ -54,20 +56,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testDefaults() throws Exception { public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject() .field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", 123) .startObject()
.endObject() .field("field", 123)
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -83,9 +85,9 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testMissingScalingFactor() throws IOException { public void testMissingScalingFactor() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float").endObject().endObject() .startObject("properties").startObject("field").field("type", "scaled_float").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))); () -> parser.parse("type", new CompressedXContent(mapping)));
@ -93,10 +95,10 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testIllegalScalingFactor() throws IOException { public void testIllegalScalingFactor() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", -1).endObject().endObject() .field("scaling_factor", -1).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping))); () -> parser.parse("type", new CompressedXContent(mapping)));
@ -104,20 +106,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testNotIndexed() throws Exception { public void testNotIndexed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("index", false).field("scaling_factor", 10.0).endObject().endObject() .field("index", false).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", 123) .startObject()
.endObject() .field("field", 123)
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -128,20 +130,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testNoDocValues() throws Exception { public void testNoDocValues() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("doc_values", false).field("scaling_factor", 10.0).endObject().endObject() .field("doc_values", false).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", 123) .startObject()
.endObject() .field("field", 123)
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -152,20 +154,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testStore() throws Exception { public void testStore() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("store", true).field("scaling_factor", 10.0).endObject().endObject() .field("store", true).field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", 123) .startObject()
.endObject() .field("field", 123)
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -181,20 +183,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testCoerce() throws Exception { public void testCoerce() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject() .field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "123") .startObject()
.endObject() .field("field", "123")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -205,20 +207,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
IndexableField dvField = fields[1]; IndexableField dvField = fields[1];
assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType()); assertEquals(DocValuesType.SORTED_NUMERIC, dvField.fieldType().docValuesType());
mapping = XContentFactory.jsonBuilder().startObject().startObject("type") mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).field("coerce", false).endObject().endObject() .field("scaling_factor", 10.0).field("coerce", false).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper2.mappingSource().toString()); assertEquals(mapping, mapper2.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ThrowingRunnable runnable = () -> mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "123") .startObject()
.endObject() .field("field", "123")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable); MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString("passed as String")); assertThat(e.getCause().getMessage(), containsString("passed as String"));
@ -234,36 +236,36 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception { private void doTestIgnoreMalformed(String value, String exceptionMessageContains) throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject() .field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ThrowingRunnable runnable = () -> mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", value) .startObject()
.endObject() .field("field", value)
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
MapperParsingException e = expectThrows(MapperParsingException.class, runnable); MapperParsingException e = expectThrows(MapperParsingException.class, runnable);
assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains)); assertThat(e.getCause().getMessage(), containsString(exceptionMessageContains));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type") mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", "scaled_float") .startObject("properties").startObject("field").field("type", "scaled_float")
.field("scaling_factor", 10.0).field("ignore_malformed", true).endObject().endObject() .field("scaling_factor", 10.0).field("ignore_malformed", true).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper2 = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper2.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", value) .startObject()
.endObject() .field("field", value)
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -271,7 +273,7 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testNullValue() throws IOException { public void testNullValue() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type") .startObject("type")
.startObject("properties") .startObject("properties")
.startObject("field") .startObject("field")
@ -279,20 +281,20 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
.field("scaling_factor", 10.0) .field("scaling_factor", 10.0)
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.nullField("field") .startObject()
.endObject() .nullField("field")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapping = XContentFactory.jsonBuilder().startObject() mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type") .startObject("type")
.startObject("properties") .startObject("properties")
.startObject("field") .startObject("field")
@ -301,16 +303,16 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
.field("null_value", 2.5) .field("null_value", 2.5)
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping)); mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.nullField("field") .startObject()
.endObject() .nullField("field")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(2, fields.length); assertEquals(2, fields.length);
@ -325,11 +327,11 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
public void testEmptyName() throws IOException { public void testEmptyName() throws IOException {
// after 5.x // after 5.x
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("") .startObject("properties").startObject("")
.field("type", "scaled_float") .field("type", "scaled_float")
.field("scaling_factor", 10.0).endObject().endObject() .field("scaling_factor", 10.0).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)) () -> parser.parse("type", new CompressedXContent(mapping))
@ -341,13 +343,13 @@ public class ScaledFloatFieldMapperTests extends ESSingleNodeTestCase {
* `index_options` was deprecated and is rejected as of 7.0 * `index_options` was deprecated and is rejected as of 7.0
*/ */
public void testRejectIndexOptions() throws IOException { public void testRejectIndexOptions() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties") .startObject("properties")
.startObject("foo") .startObject("foo")
.field("type", "scaled_float") .field("type", "scaled_float")
.field("index_options", randomFrom(new String[] { "docs", "freqs", "positions", "offsets" })) .field("index_options", randomFrom(new String[] { "docs", "freqs", "positions", "offsets" }))
.endObject() .endObject()
.endObject().endObject().endObject().string(); .endObject().endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping))); MapperParsingException e = expectThrows(MapperParsingException.class, () -> parser.parse("type", new CompressedXContent(mapping)));
assertThat(e.getMessage(), containsString("index_options not allowed in field [foo] of type [scaled_float]")); assertThat(e.getMessage(), containsString("index_options not allowed in field [foo] of type [scaled_float]"));
} }

@ -24,6 +24,7 @@ import org.apache.lucene.analysis.CannedTokenStream;
import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.MockTokenizer;
import org.apache.lucene.analysis.Token; import org.apache.lucene.analysis.Token;
import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.TokenStream;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -52,7 +53,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testMerge() throws IOException { public void testMerge() throws IOException {
String stage1Mapping = XContentFactory.jsonBuilder().startObject() String stage1Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("person") .startObject("person")
.startObject("properties") .startObject("properties")
.startObject("tc") .startObject("tc")
@ -60,12 +61,12 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.field("analyzer", "keyword") .field("analyzer", "keyword")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
MapperService mapperService = createIndex("test").mapperService(); MapperService mapperService = createIndex("test").mapperService();
DocumentMapper stage1 = mapperService.merge("person", DocumentMapper stage1 = mapperService.merge("person",
new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE); new CompressedXContent(stage1Mapping), MapperService.MergeReason.MAPPING_UPDATE);
String stage2Mapping = XContentFactory.jsonBuilder().startObject() String stage2Mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("person") .startObject("person")
.startObject("properties") .startObject("properties")
.startObject("tc") .startObject("tc")
@ -73,7 +74,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.field("analyzer", "standard") .field("analyzer", "standard")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper stage2 = mapperService.merge("person", DocumentMapper stage2 = mapperService.merge("person",
new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE); new CompressedXContent(stage2Mapping), MapperService.MergeReason.MAPPING_UPDATE);
@ -131,7 +132,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
public void testEmptyName() throws IOException { public void testEmptyName() throws IOException {
IndexService indexService = createIndex("test"); IndexService indexService = createIndex("test");
DocumentMapperParser parser = indexService.mapperService().documentMapperParser(); DocumentMapperParser parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("type") .startObject("type")
.startObject("properties") .startObject("properties")
.startObject("") .startObject("")
@ -139,7 +140,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.field("analyzer", "standard") .field("analyzer", "standard")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
// Empty name not allowed in index created after 5.0 // Empty name not allowed in index created after 5.0
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@ -167,7 +168,7 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
} }
private DocumentMapper createIndexWithTokenCountField() throws IOException { private DocumentMapper createIndexWithTokenCountField() throws IOException {
final String content = XContentFactory.jsonBuilder().startObject() final String content = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("person") .startObject("person")
.startObject("properties") .startObject("properties")
.startObject("test") .startObject("test")
@ -180,16 +181,16 @@ public class TokenCountFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content)); return createIndex("test").mapperService().documentMapperParser().parse("person", new CompressedXContent(content));
} }
private SourceToParse createDocument(String fieldValue) throws Exception { private SourceToParse createDocument(String fieldValue) throws Exception {
BytesReference request = XContentFactory.jsonBuilder() BytesReference request = BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject() .startObject()
.field("test", fieldValue) .field("test", fieldValue)
.endObject().bytes(); .endObject());
return SourceToParse.source("test", "person", "1", request, XContentType.JSON); return SourceToParse.source("test", "person", "1", request, XContentType.JSON);
} }

@ -19,6 +19,8 @@
package org.elasticsearch.join.mapper; package org.elasticsearch.join.mapper;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
@ -45,7 +47,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testSingleLevel() throws Exception { public void testSingleLevel() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
@ -54,7 +56,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
IndexService service = createIndex("test"); IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
@ -62,39 +64,39 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
// Doc without join // Doc without join
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0", ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON)); BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON));
assertNull(doc.rootDoc().getBinaryValue("join_field")); assertNull(doc.rootDoc().getBinaryValue("join_field"));
// Doc parent // Doc parent
doc = docMapper.parse(SourceToParse.source("test", "type", "1", doc = docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "parent") .field("join_field", "parent")
.endObject().bytes(), XContentType.JSON)); .endObject()), XContentType.JSON));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Doc child // Doc child
doc = docMapper.parse(SourceToParse.source("test", "type", "2", doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field") .startObject("join_field")
.field("name", "child") .field("name", "child")
.field("parent", "1") .field("parent", "1")
.endObject() .endObject()
.endObject().bytes(), XContentType.JSON).routing("1")); .endObject()), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Unkwnown join name // Unkwnown join name
MapperException exc = expectThrows(MapperParsingException.class, MapperException exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "1", () -> docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "unknown") .field("join_field", "unknown")
.endObject().bytes(), XContentType.JSON))); .endObject()), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]")); assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
} }
public void testParentIdSpecifiedAsNumber() throws Exception { public void testParentIdSpecifiedAsNumber() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
@ -103,32 +105,32 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
IndexService service = createIndex("test"); IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "2", ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field") .startObject("join_field")
.field("name", "child") .field("name", "child")
.field("parent", 1) .field("parent", 1)
.endObject() .endObject()
.endObject().bytes(), XContentType.JSON).routing("1")); .endObject()), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
doc = docMapper.parse(SourceToParse.source("test", "type", "2", doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field") .startObject("join_field")
.field("name", "child") .field("name", "child")
.field("parent", 1.0) .field("parent", 1.0)
.endObject() .endObject()
.endObject().bytes(), XContentType.JSON).routing("1")); .endObject()), XContentType.JSON).routing("1"));
assertEquals("1.0", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("1.0", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
} }
public void testMultipleLevels() throws Exception { public void testMultipleLevels() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
@ -138,7 +140,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
IndexService service = createIndex("test"); IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
@ -146,26 +148,26 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
// Doc without join // Doc without join
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0", ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "0",
XContentFactory.jsonBuilder().startObject().endObject().bytes(), XContentType.JSON)); BytesReference.bytes(XContentFactory.jsonBuilder().startObject().endObject()), XContentType.JSON));
assertNull(doc.rootDoc().getBinaryValue("join_field")); assertNull(doc.rootDoc().getBinaryValue("join_field"));
// Doc parent // Doc parent
doc = docMapper.parse(SourceToParse.source("test", "type", "1", doc = docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder() BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject() .startObject()
.field("join_field", "parent") .field("join_field", "parent")
.endObject().bytes(), XContentType.JSON)); .endObject()), XContentType.JSON));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); assertEquals("parent", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Doc child // Doc child
doc = docMapper.parse(SourceToParse.source("test", "type", "2", doc = docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field") .startObject("join_field")
.field("name", "child") .field("name", "child")
.field("parent", "1") .field("parent", "1")
.endObject() .endObject()
.endObject().bytes(), XContentType.JSON).routing("1")); .endObject()), XContentType.JSON).routing("1"));
assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString()); assertEquals("1", doc.rootDoc().getBinaryValue("join_field#parent").utf8ToString());
assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); assertEquals("child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
@ -173,44 +175,44 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
// Doc child missing parent // Doc child missing parent
MapperException exc = expectThrows(MapperParsingException.class, MapperException exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "2", () -> docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "child") .field("join_field", "child")
.endObject().bytes(), XContentType.JSON).routing("1"))); .endObject()), XContentType.JSON).routing("1")));
assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]")); assertThat(exc.getRootCause().getMessage(), containsString("[parent] is missing for join field [join_field]"));
// Doc child missing routing // Doc child missing routing
exc = expectThrows(MapperParsingException.class, exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "2", () -> docMapper.parse(SourceToParse.source("test", "type", "2",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field") .startObject("join_field")
.field("name", "child") .field("name", "child")
.field("parent", "1") .field("parent", "1")
.endObject() .endObject()
.endObject().bytes(), XContentType.JSON))); .endObject()), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]")); assertThat(exc.getRootCause().getMessage(), containsString("[routing] is missing for join field [join_field]"));
// Doc grand_child // Doc grand_child
doc = docMapper.parse(SourceToParse.source("test", "type", "3", doc = docMapper.parse(SourceToParse.source("test", "type", "3",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.startObject("join_field") .startObject("join_field")
.field("name", "grand_child") .field("name", "grand_child")
.field("parent", "2") .field("parent", "2")
.endObject() .endObject()
.endObject().bytes(), XContentType.JSON).routing("1")); .endObject()), XContentType.JSON).routing("1"));
assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString()); assertEquals("2", doc.rootDoc().getBinaryValue("join_field#child").utf8ToString());
assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString()); assertEquals("grand_child", doc.rootDoc().getBinaryValue("join_field").utf8ToString());
// Unkwnown join name // Unkwnown join name
exc = expectThrows(MapperParsingException.class, exc = expectThrows(MapperParsingException.class,
() -> docMapper.parse(SourceToParse.source("test", "type", "1", () -> docMapper.parse(SourceToParse.source("test", "type", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field("join_field", "unknown") .field("join_field", "unknown")
.endObject().bytes(), XContentType.JSON))); .endObject()), XContentType.JSON)));
assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]")); assertThat(exc.getRootCause().getMessage(), containsString("unknown join name [unknown] for field [join_field]"));
} }
public void testUpdateRelations() throws Exception { public void testUpdateRelations() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
.startObject("relations") .startObject("relations")
@ -218,21 +220,21 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("child", "grand_child1", "grand_child2") .array("child", "grand_child1", "grand_child2")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
IndexService indexService = createIndex("test"); IndexService indexService = createIndex("test");
DocumentMapper docMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping), DocumentMapper docMapper = indexService.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
{ {
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
.startObject("relations") .startObject("relations")
.array("child", "grand_child1", "grand_child2") .array("child", "grand_child1", "grand_child2")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class, IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE)); MapperService.MergeReason.MAPPING_UPDATE));
@ -240,7 +242,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
{ {
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
.startObject("relations") .startObject("relations")
@ -248,7 +250,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.field("child", "grand_child1") .field("child", "grand_child1")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class, IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE)); MapperService.MergeReason.MAPPING_UPDATE));
@ -256,7 +258,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
{ {
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
.startObject("relations") .startObject("relations")
@ -265,7 +267,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("child", "grand_child1", "grand_child2") .array("child", "grand_child1", "grand_child2")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class, IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE)); MapperService.MergeReason.MAPPING_UPDATE));
@ -273,7 +275,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
{ {
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
.startObject("relations") .startObject("relations")
@ -282,7 +284,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.field("grand_child2", "grand_grand_child") .field("grand_child2", "grand_grand_child")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalStateException exc = expectThrows(IllegalStateException.class, IllegalStateException exc = expectThrows(IllegalStateException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping), () -> indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE)); MapperService.MergeReason.MAPPING_UPDATE));
@ -290,7 +292,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
{ {
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
.startObject("relations") .startObject("relations")
@ -298,7 +300,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("child", "grand_child1", "grand_child2") .array("child", "grand_child1", "grand_child2")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping), docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
@ -310,7 +312,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
{ {
final String updateMapping = XContentFactory.jsonBuilder().startObject().startObject("properties") final String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
.startObject("relations") .startObject("relations")
@ -319,7 +321,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.array("other", "child_other1", "child_other2") .array("other", "child_other1", "child_other2")
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping), docMapper = indexService.mapperService().merge("type", new CompressedXContent(updateMapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService())); assertTrue(docMapper.mappers().getMapper("join_field") == ParentJoinFieldMapper.getMapper(indexService.mapperService()));
@ -334,7 +336,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testInvalidJoinFieldInsideObject() throws Exception { public void testInvalidJoinFieldInsideObject() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("object") .startObject("object")
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
@ -345,7 +347,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
IndexService indexService = createIndex("test"); IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(MapperParsingException.class, MapperParsingException exc = expectThrows(MapperParsingException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(mapping), () -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
@ -355,7 +357,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testInvalidJoinFieldInsideMultiFields() throws Exception { public void testInvalidJoinFieldInsideMultiFields() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("properties") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("properties")
.startObject("number") .startObject("number")
.field("type", "integer") .field("type", "integer")
.startObject("fields") .startObject("fields")
@ -367,7 +369,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
IndexService indexService = createIndex("test"); IndexService indexService = createIndex("test");
MapperParsingException exc = expectThrows(MapperParsingException.class, MapperParsingException exc = expectThrows(MapperParsingException.class,
() -> indexService.mapperService().merge("type", new CompressedXContent(mapping), () -> indexService.mapperService().merge("type", new CompressedXContent(mapping),
@ -379,7 +381,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
public void testMultipleJoinFields() throws Exception { public void testMultipleJoinFields() throws Exception {
IndexService indexService = createIndex("test"); IndexService indexService = createIndex("test");
{ {
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
@ -395,14 +397,14 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE)); new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]")); assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
} }
{ {
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
@ -412,16 +414,16 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
indexService.mapperService().merge("type", indexService.mapperService().merge("type",
new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE); new CompressedXContent(mapping), MapperService.MergeReason.MAPPING_UPDATE);
String updateMapping = XContentFactory.jsonBuilder().startObject() String updateMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("another_join_field") .startObject("another_join_field")
.field("type", "join") .field("type", "join")
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type", IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> indexService.mapperService().merge("type",
new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE)); new CompressedXContent(updateMapping), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]")); assertThat(exc.getMessage(), containsString("Field [_parent_join] is defined twice in [type]"));
@ -429,7 +431,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testEagerGlobalOrdinals() throws Exception { public void testEagerGlobalOrdinals() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject() String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
@ -439,7 +441,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
IndexService service = createIndex("test"); IndexService service = createIndex("test");
DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping), DocumentMapper docMapper = service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
@ -450,7 +452,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
assertNotNull(service.mapperService().fullName("join_field#child")); assertNotNull(service.mapperService().fullName("join_field#child"));
assertTrue(service.mapperService().fullName("join_field#child").eagerGlobalOrdinals()); assertTrue(service.mapperService().fullName("join_field#child").eagerGlobalOrdinals());
mapping = XContentFactory.jsonBuilder().startObject() mapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
.startObject("properties") .startObject("properties")
.startObject("join_field") .startObject("join_field")
.field("type", "join") .field("type", "join")
@ -461,7 +463,7 @@ public class ParentJoinFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().string(); .endObject());
service.mapperService().merge("type", new CompressedXContent(mapping), service.mapperService().merge("type", new CompressedXContent(mapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);
assertFalse(service.mapperService().fullName("join_field").eagerGlobalOrdinals()); assertFalse(service.mapperService().fullName("join_field").eagerGlobalOrdinals());

@ -33,6 +33,7 @@ import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
import org.apache.lucene.search.similarities.Similarity; import org.apache.lucene.search.similarities.Similarity;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -132,7 +133,7 @@ public class HasChildQueryBuilderTests extends AbstractQueryTestCase<HasChildQue
.endObject().endObject().endObject(); .endObject().endObject().endObject();
mapperService.merge(TYPE, mapperService.merge(TYPE,
new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE); new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
} }
/** /**

@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -112,7 +113,7 @@ public class HasParentQueryBuilderTests extends AbstractQueryTestCase<HasParentQ
.endObject().endObject().endObject(); .endObject().endObject().endObject();
mapperService.merge(TYPE, mapperService.merge(TYPE,
new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE); new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
} }
/** /**

@ -34,6 +34,7 @@ import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
@ -89,7 +90,7 @@ public class LegacyHasChildQueryBuilderTests extends AbstractQueryTestCase<HasCh
similarity = randomFrom("classic", "BM25"); similarity = randomFrom("classic", "BM25");
// TODO: use a single type when inner hits have been changed to work with join field, // TODO: use a single type when inner hits have been changed to work with join field,
// this test randomly generates queries with inner hits // this test randomly generates queries with inner hits
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword", STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer", INT_FIELD_NAME, "type=integer",
@ -97,8 +98,8 @@ public class LegacyHasChildQueryBuilderTests extends AbstractQueryTestCase<HasCh
BOOLEAN_FIELD_NAME, "type=boolean", BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date", DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object" OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE, "_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME, "type=text",
"custom_string", "type=text,similarity=" + similarity, "custom_string", "type=text,similarity=" + similarity,
@ -107,7 +108,7 @@ public class LegacyHasChildQueryBuilderTests extends AbstractQueryTestCase<HasCh
BOOLEAN_FIELD_NAME, "type=boolean", BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date", DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object" OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
} }
@Override @Override

@ -24,6 +24,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
@ -80,7 +81,7 @@ public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasP
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
// TODO: use a single type when inner hits have been changed to work with join field, // TODO: use a single type when inner hits have been changed to work with join field,
// this test randomly generates queries with inner hits // this test randomly generates queries with inner hits
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword", STRING_FIELD_NAME_2, "type=keyword",
INT_FIELD_NAME, "type=integer", INT_FIELD_NAME, "type=integer",
@ -88,8 +89,8 @@ public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasP
BOOLEAN_FIELD_NAME, "type=boolean", BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date", DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object" OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE, "_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME, "type=text",
STRING_FIELD_NAME_2, "type=keyword", STRING_FIELD_NAME_2, "type=keyword",
@ -98,9 +99,9 @@ public class LegacyHasParentQueryBuilderTests extends AbstractQueryTestCase<HasP
BOOLEAN_FIELD_NAME, "type=boolean", BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date", DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object" OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge("just_a_type", new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef("just_a_type" mapperService.merge("just_a_type", new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef("just_a_type"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
} }
/** /**

@ -26,6 +26,7 @@ import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
@ -65,15 +66,15 @@ public class LegacyParentIdQueryBuilderTests extends AbstractQueryTestCase<Paren
@Override @Override
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
mapperService.merge(PARENT_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE, mapperService.merge(PARENT_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(PARENT_TYPE,
STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME, "type=text",
INT_FIELD_NAME, "type=integer", INT_FIELD_NAME, "type=integer",
DOUBLE_FIELD_NAME, "type=double", DOUBLE_FIELD_NAME, "type=double",
BOOLEAN_FIELD_NAME, "type=boolean", BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date", DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object" OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(CHILD_TYPE, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE, mapperService.merge(CHILD_TYPE, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(CHILD_TYPE,
"_parent", "type=" + PARENT_TYPE, "_parent", "type=" + PARENT_TYPE,
STRING_FIELD_NAME, "type=text", STRING_FIELD_NAME, "type=text",
INT_FIELD_NAME, "type=integer", INT_FIELD_NAME, "type=integer",
@ -81,7 +82,7 @@ public class LegacyParentIdQueryBuilderTests extends AbstractQueryTestCase<Paren
BOOLEAN_FIELD_NAME, "type=boolean", BOOLEAN_FIELD_NAME, "type=boolean",
DATE_FIELD_NAME, "type=date", DATE_FIELD_NAME, "type=date",
OBJECT_FIELD_NAME, "type=object" OBJECT_FIELD_NAME, "type=object"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
} }
@Override @Override

@ -20,6 +20,7 @@ package org.elasticsearch.join.query;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexRequestBuilder;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
@ -83,7 +84,7 @@ public abstract class ParentChildTestCase extends ESIntegTestCase {
protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId, protected IndexRequestBuilder createIndexRequest(String index, String type, String id, String parentId,
XContentBuilder builder) throws IOException { XContentBuilder builder) throws IOException {
Map<String, Object> source = XContentHelper.convertToMap(JsonXContent.jsonXContent, builder.string(), false); Map<String, Object> source = XContentHelper.convertToMap(JsonXContent.jsonXContent, Strings.toString(builder), false);
return createIndexRequest(index, type, id, parentId, source); return createIndexRequest(index, type, id, parentId, source);
} }

@ -22,18 +22,16 @@ package org.elasticsearch.join.query;
import org.apache.lucene.index.Term; import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.DocValuesTermsQuery;
import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.MatchNoDocsQuery;
import org.apache.lucene.search.Query; import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.index.query.QueryShardException; import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.join.ParentJoinPlugin; import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
@ -104,7 +102,7 @@ public class ParentIdQueryBuilderTests extends AbstractQueryTestCase<ParentIdQue
.endObject().endObject().endObject(); .endObject().endObject().endObject();
mapperService.merge(TYPE, mapperService.merge(TYPE,
new CompressedXContent(mapping.string()), MapperService.MergeReason.MAPPING_UPDATE); new CompressedXContent(Strings.toString(mapping)), MapperService.MergeReason.MAPPING_UPDATE);
} }
@Override @Override

@ -416,7 +416,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser); builder.copyCurrentStructure(parser);
builder.flush(); builder.flush();
documents.add(builder.bytes()); documents.add(BytesReference.bytes(builder));
} }
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME + throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +
@ -437,7 +437,7 @@ public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBu
try (XContentBuilder builder = XContentFactory.jsonBuilder()) { try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
builder.copyCurrentStructure(parser); builder.copyCurrentStructure(parser);
builder.flush(); builder.flush();
documents.add(builder.bytes()); documents.add(BytesReference.bytes(builder));
} }
} else { } else {
throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME + throw new ParsingException(parser.getTokenLocation(), "[" + PercolateQueryBuilder.NAME +

@@ -422,7 +422,7 @@ public class PercolatorFieldMapper extends FieldMapper {
try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) { try (XContentBuilder builder = XContentFactory.contentBuilder(QUERY_BUILDER_CONTENT_TYPE)) {
queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap())); queryBuilder.toXContent(builder, new MapParams(Collections.emptyMap()));
builder.flush(); builder.flush();
byte[] queryBuilderAsBytes = BytesReference.toBytes(builder.bytes()); byte[] queryBuilderAsBytes = BytesReference.toBytes(BytesReference.bytes(builder));
context.doc().add(new Field(qbField.name(), queryBuilderAsBytes, qbField.fieldType())); context.doc().add(new Field(qbField.name(), queryBuilderAsBytes, qbField.fieldType()));
} }
} }

@@ -75,6 +75,7 @@ import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
@@ -140,7 +141,7 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex(indexName, Settings.EMPTY); IndexService indexService = createIndex(indexName, Settings.EMPTY);
mapperService = indexService.mapperService(); mapperService = indexService.mapperService();
String mapper = XContentFactory.jsonBuilder().startObject().startObject("type") String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties") .startObject("properties")
.startObject("int_field").field("type", "integer").endObject() .startObject("int_field").field("type", "integer").endObject()
.startObject("long_field").field("type", "long").endObject() .startObject("long_field").field("type", "long").endObject()
@@ -149,13 +150,13 @@ public class CandidateQueryTests extends ESSingleNodeTestCase {
.startObject("double_field").field("type", "double").endObject() .startObject("double_field").field("type", "double").endObject()
.startObject("ip_field").field("type", "ip").endObject() .startObject("ip_field").field("type", "ip").endObject()
.startObject("field").field("type", "keyword").endObject() .startObject("field").field("type", "keyword").endObject()
.endObject().endObject().endObject().string(); .endObject().endObject().endObject());
documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); documentMapper = mapperService.merge("type", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
String queryField = "query_field"; String queryField = "query_field";
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("type") String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject() .startObject("properties").startObject(queryField).field("type", "percolator").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); mapperService.merge("type", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField); fieldMapper = (PercolatorFieldMapper) mapperService.documentMapper("type").mappers().getMapper(queryField);
fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType(); fieldType = (PercolatorFieldMapper.FieldType) fieldMapper.fieldType();

@@ -31,6 +31,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
@@ -96,12 +97,12 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { protected void initializeAdditionalMappings(MapperService mapperService) throws IOException {
queryField = randomAlphaOfLength(4); queryField = randomAlphaOfLength(4);
String docType = "_doc"; String docType = "_doc";
mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType, mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
queryField, "type=percolator" queryField, "type=percolator"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
mapperService.merge(docType, new CompressedXContent(PutMappingRequest.buildFromSimplifiedDef(docType, mapperService.merge(docType, new CompressedXContent(Strings.toString(PutMappingRequest.buildFromSimplifiedDef(docType,
STRING_FIELD_NAME, "type=text" STRING_FIELD_NAME, "type=text"
).string()), MapperService.MergeReason.MAPPING_UPDATE); ))), MapperService.MergeReason.MAPPING_UPDATE);
if (mapperService.getIndexSettings().isSingleType() == false) { if (mapperService.getIndexSettings().isSingleType() == false) {
PercolateQueryBuilderTests.docType = docType; PercolateQueryBuilderTests.docType = docType;
} }
@@ -339,7 +340,7 @@ public class PercolateQueryBuilderTests extends AbstractQueryTestCase<PercolateQ
XContentBuilder xContent = XContentFactory.jsonBuilder(); XContentBuilder xContent = XContentFactory.jsonBuilder();
xContent.map(source); xContent.map(source);
return xContent.bytes(); return BytesReference.bytes(xContent);
} catch (IOException e) { } catch (IOException e) {
throw new UncheckedIOException(e); throw new UncheckedIOException(e);
} }

@@ -42,7 +42,9 @@ import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.hash.MurmurHash3;
@@ -141,7 +143,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
indexService = createIndex("test"); indexService = createIndex("test");
mapperService = indexService.mapperService(); mapperService = indexService.mapperService();
String mapper = XContentFactory.jsonBuilder().startObject().startObject("doc") String mapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc")
.startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
.startObject("properties") .startObject("properties")
.startObject("field").field("type", "text").endObject() .startObject("field").field("type", "text").endObject()
@@ -157,15 +159,15 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.startObject("number_field6").field("type", "double").endObject() .startObject("number_field6").field("type", "double").endObject()
.startObject("number_field7").field("type", "ip").endObject() .startObject("number_field7").field("type", "ip").endObject()
.startObject("date_field").field("type", "date").endObject() .startObject("date_field").field("type", "date").endObject()
.endObject().endObject().endObject().string(); .endObject().endObject().endObject());
mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE); mapperService.merge("doc", new CompressedXContent(mapper), MapperService.MergeReason.MAPPING_UPDATE);
} }
private void addQueryFieldMappings() throws Exception { private void addQueryFieldMappings() throws Exception {
fieldName = randomAlphaOfLength(4); fieldName = randomAlphaOfLength(4);
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc") String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc")
.startObject("properties").startObject(fieldName).field("type", "percolator").endObject().endObject() .startObject("properties").startObject(fieldName).field("type", "percolator").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
fieldType = (PercolatorFieldMapper.FieldType) mapperService.fullName(fieldName); fieldType = (PercolatorFieldMapper.FieldType) mapperService.fullName(fieldName);
} }
@@ -459,12 +461,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
public void testPercolatorFieldMapper() throws Exception { public void testPercolatorFieldMapper() throws Exception {
addQueryFieldMappings(); addQueryFieldMappings();
QueryBuilder queryBuilder = termQuery("field", "value"); QueryBuilder queryBuilder = termQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
.jsonBuilder() BytesReference.bytes(XContentFactory
.startObject() .jsonBuilder()
.field(fieldName, queryBuilder) .startObject()
.endObject().bytes(), .field(fieldName, queryBuilder)
XContentType.JSON)); .endObject()),
XContentType.JSON));
assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name()).length, equalTo(1));
assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value")); assertThat(doc.rootDoc().getFields(fieldType.queryTermsField.name())[0].binaryValue().utf8ToString(), equalTo("field\0value"));
@@ -477,11 +480,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
// add an query for which we don't extract terms from // add an query for which we don't extract terms from
queryBuilder = rangeQuery("field").from("a").to("z"); queryBuilder = rangeQuery("field").from("a").to("z");
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory
.jsonBuilder() .jsonBuilder()
.startObject() .startObject()
.field(fieldName, queryBuilder) .field(fieldName, queryBuilder)
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1)); assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).length, equalTo(1));
assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(), assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name())[0].stringValue(),
@@ -504,9 +507,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
for (QueryBuilder query : queries) { for (QueryBuilder query : queries) {
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, query) .field(fieldName, query)
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
assertQueryBuilder(qbSource, query); assertQueryBuilder(qbSource, query);
@@ -517,12 +520,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
addQueryFieldMappings(); addQueryFieldMappings();
client().prepareIndex("remote", "doc", "1").setSource("field", "value").get(); client().prepareIndex("remote", "doc", "1").setSource("field", "value").get();
QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "doc", "1", "field")); QueryBuilder queryBuilder = termsLookupQuery("field", new TermsLookup("remote", "doc", "1", "field"));
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
.jsonBuilder() BytesReference.bytes(XContentFactory
.startObject() .jsonBuilder()
.field(fieldName, queryBuilder) .startObject()
.endObject().bytes(), .field(fieldName, queryBuilder)
XContentType.JSON)); .endObject()),
XContentType.JSON));
BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); BytesRef qbSource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
QueryShardContext shardContext = indexService.newQueryShardContext( QueryShardContext shardContext = indexService.newQueryShardContext(
randomInt(20), null, () -> { randomInt(20), null, () -> {
@@ -537,11 +541,11 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
public void testPercolatorFieldMapperUnMappedField() throws Exception { public void testPercolatorFieldMapperUnMappedField() throws Exception {
addQueryFieldMappings(); addQueryFieldMappings();
MapperParsingException exception = expectThrows(MapperParsingException.class, () -> { MapperParsingException exception = expectThrows(MapperParsingException.class, () -> {
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory
.jsonBuilder() .jsonBuilder()
.startObject() .startObject()
.field(fieldName, termQuery("unmapped_field", "value")) .field(fieldName, termQuery("unmapped_field", "value"))
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
}); });
assertThat(exception.getCause(), instanceOf(QueryShardException.class)); assertThat(exception.getCause(), instanceOf(QueryShardException.class));
@@ -551,20 +555,20 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
public void testPercolatorFieldMapper_noQuery() throws Exception { public void testPercolatorFieldMapper_noQuery() throws Exception {
addQueryFieldMappings(); addQueryFieldMappings();
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference
.jsonBuilder() .bytes(XContentFactory
.startObject() .jsonBuilder()
.endObject() .startObject()
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0)); assertThat(doc.rootDoc().getFields(fieldType.queryBuilderField.name()).length, equalTo(0));
try { try {
mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", XContentFactory mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", BytesReference.bytes(XContentFactory
.jsonBuilder() .jsonBuilder()
.startObject() .startObject()
.nullField(fieldName) .nullField(fieldName)
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
} catch (MapperParsingException e) { } catch (MapperParsingException e) {
assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object")); assertThat(e.getDetailedMessage(), containsString("query malformed, must start with start_object"));
@@ -576,9 +580,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
IndexService indexService = createIndex("test1", Settings.EMPTY); IndexService indexService = createIndex("test1", Settings.EMPTY);
MapperService mapperService = indexService.mapperService(); MapperService mapperService = indexService.mapperService();
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject("doc") String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("doc")
.startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject() .startObject("properties").startObject(fieldName).field("type", "percolator").field("index", "no").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
MapperParsingException e = expectThrows(MapperParsingException.class, () -> MapperParsingException e = expectThrows(MapperParsingException.class, () ->
mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE)); mapperService.merge("doc", new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE));
assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]")); assertThat(e.getMessage(), containsString("Mapping definition for [" + fieldName + "] has unsupported parameters: [index : no]"));
@@ -587,21 +591,21 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
// multiple percolator fields are allowed in the mapping, but only one field can be used at index time. // multiple percolator fields are allowed in the mapping, but only one field can be used at index time.
public void testMultiplePercolatorFields() throws Exception { public void testMultiplePercolatorFields() throws Exception {
String typeName = "doc"; String typeName = "doc";
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName)
.startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
.startObject("properties") .startObject("properties")
.startObject("query_field1").field("type", "percolator").endObject() .startObject("query_field1").field("type", "percolator").endObject()
.startObject("query_field2").field("type", "percolator").endObject() .startObject("query_field2").field("type", "percolator").endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
QueryBuilder queryBuilder = matchQuery("field", "value"); QueryBuilder queryBuilder = matchQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject() BytesReference.bytes(jsonBuilder().startObject()
.field("query_field1", queryBuilder) .field("query_field1", queryBuilder)
.field("query_field2", queryBuilder) .field("query_field2", queryBuilder)
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields assertThat(doc.rootDoc().getFields().size(), equalTo(14)); // also includes all other meta fields
BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue(); BytesRef queryBuilderAsBytes = doc.rootDoc().getField("query_field1.query_builder_field").binaryValue();
@@ -614,7 +618,7 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
// percolator field can be nested under an object field, but only one query can be specified per document // percolator field can be nested under an object field, but only one query can be specified per document
public void testNestedPercolatorField() throws Exception { public void testNestedPercolatorField() throws Exception {
String typeName = "doc"; String typeName = "doc";
String percolatorMapper = XContentFactory.jsonBuilder().startObject().startObject(typeName) String percolatorMapper = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(typeName)
.startObject("_field_names").field("enabled", false).endObject() // makes testing easier .startObject("_field_names").field("enabled", false).endObject() // makes testing easier
.startObject("properties") .startObject("properties")
.startObject("object_field") .startObject("object_field")
@@ -624,25 +628,25 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.endObject() .endObject()
.endObject() .endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE); mapperService.merge(typeName, new CompressedXContent(percolatorMapper), MapperService.MergeReason.MAPPING_UPDATE);
QueryBuilder queryBuilder = matchQuery("field", "value"); QueryBuilder queryBuilder = matchQuery("field", "value");
ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", ParsedDocument doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject().startObject("object_field") BytesReference.bytes(jsonBuilder().startObject().startObject("object_field")
.field("query_field", queryBuilder) .field("query_field", queryBuilder)
.endObject().endObject().bytes(), .endObject().endObject()),
XContentType.JSON)); XContentType.JSON));
assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); BytesRef queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
assertQueryBuilder(queryBuilderAsBytes, queryBuilder); assertQueryBuilder(queryBuilderAsBytes, queryBuilder);
doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", doc = mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject() BytesReference.bytes(jsonBuilder().startObject()
.startArray("object_field") .startArray("object_field")
.startObject().field("query_field", queryBuilder).endObject() .startObject().field("query_field", queryBuilder).endObject()
.endArray() .endArray()
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields assertThat(doc.rootDoc().getFields().size(), equalTo(10)); // also includes all other meta fields
queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue(); queryBuilderAsBytes = doc.rootDoc().getField("object_field.query_field.query_builder_field").binaryValue();
@@ -650,12 +654,12 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
MapperParsingException e = expectThrows(MapperParsingException.class, () -> { MapperParsingException e = expectThrows(MapperParsingException.class, () -> {
mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1", mapperService.documentMapper(typeName).parse(SourceToParse.source("test", typeName, "1",
jsonBuilder().startObject() BytesReference.bytes(jsonBuilder().startObject()
.startArray("object_field") .startArray("object_field")
.startObject().field("query_field", queryBuilder).endObject() .startObject().field("query_field", queryBuilder).endObject()
.startObject().field("query_field", queryBuilder).endObject() .startObject().field("query_field", queryBuilder).endObject()
.endArray() .endArray()
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
} }
); );
@@ -708,9 +712,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testEmptyName() throws Exception { public void testEmptyName() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type1") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type1")
.startObject("properties").startObject("").field("type", "percolator").endObject().endObject() .startObject("properties").startObject("").field("type", "percolator").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapperParser parser = mapperService.documentMapperParser(); DocumentMapperParser parser = mapperService.documentMapperParser();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -735,9 +739,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
query.endObject(); query.endObject();
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.rawField(fieldName, new BytesArray(query.string()), query.contentType()) .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); BytesRef querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) { try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
@@ -773,9 +777,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
query.endObject(); query.endObject();
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.rawField(fieldName, new BytesArray(query.string()), query.contentType()) .rawField(fieldName, new BytesArray(Strings.toString(query)).streamInput(), query.contentType())
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue(); querySource = doc.rootDoc().getFields(fieldType.queryBuilderField.name())[0].binaryValue();
try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) { try (InputStream in = new ByteArrayInputStream(querySource.bytes, querySource.offset, querySource.length)) {
@@ -859,9 +863,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2"))) .must(boolQuery().must(termQuery("field", "value1")).must(termQuery("field", "value2")))
.must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3"))); .must(boolQuery().must(termQuery("field", "value2")).must(termQuery("field", "value3")));
ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", ParsedDocument doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, qb) .field(fieldName, qb)
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
List<String> values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) List<String> values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name()))
@@ -881,9 +885,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4"))) .must(boolQuery().must(termQuery("field", "value3")).must(termQuery("field", "value4")))
.must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); .must(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5")));
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, qb) .field(fieldName, qb)
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name()))
@@ -906,9 +910,9 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {
.should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4"))) .should(boolQuery().should(termQuery("field", "value3")).should(termQuery("field", "value4")))
.should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5"))); .should(boolQuery().should(termQuery("field", "value4")).should(termQuery("field", "value5")));
doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1", doc = mapperService.documentMapper("doc").parse(SourceToParse.source("test", "doc", "1",
XContentFactory.jsonBuilder().startObject() BytesReference.bytes(XContentFactory.jsonBuilder().startObject()
.field(fieldName, qb) .field(fieldName, qb)
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name())) values = Arrays.stream(doc.rootDoc().getFields(fieldType.queryTermsField.name()))

@ -85,7 +85,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
).endObject()).get(); ).endObject()).get();
client().admin().indices().prepareRefresh().get(); client().admin().indices().prepareRefresh().get();
BytesReference source = jsonBuilder().startObject().endObject().bytes(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().endObject());
logger.info("percolating empty doc"); logger.info("percolating empty doc");
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
@ -93,7 +93,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertHitCount(response, 1); assertHitCount(response, 1);
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
source = jsonBuilder().startObject().field("field1", "value").endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject());
logger.info("percolating doc with 1 field"); logger.info("percolating doc with 1 field");
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
@ -105,7 +105,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(1).getId(), equalTo("2")); assertThat(response.getHits().getAt(1).getId(), equalTo("2"));
assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValue(), equalTo(0)); assertThat(response.getHits().getAt(1).getFields().get("_percolator_document_slot").getValue(), equalTo(0));
source = jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject());
logger.info("percolating doc with 2 fields"); logger.info("percolating doc with 2 fields");
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
@ -122,8 +122,8 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
logger.info("percolating doc with 2 fields"); logger.info("percolating doc with 2 fields");
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", Arrays.asList( .setQuery(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "value").endObject().bytes(), BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()),
jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject().bytes() BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").field("field2", "value").endObject())
), XContentType.JSON)) ), XContentType.JSON))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
.get(); .get();
@ -189,7 +189,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
client().admin().indices().prepareRefresh().get(); client().admin().indices().prepareRefresh().get();
// Test long range: // Test long range:
BytesReference source = jsonBuilder().startObject().field("field1", 12).endObject().bytes(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 12).endObject());
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get(); .get();
@ -198,7 +198,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("3")); assertThat(response.getHits().getAt(0).getId(), equalTo("3"));
assertThat(response.getHits().getAt(1).getId(), equalTo("1")); assertThat(response.getHits().getAt(1).getId(), equalTo("1"));
source = jsonBuilder().startObject().field("field1", 11).endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field1", 11).endObject());
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get(); .get();
@ -206,7 +206,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("1")); assertThat(response.getHits().getAt(0).getId(), equalTo("1"));
// Test double range: // Test double range:
source = jsonBuilder().startObject().field("field2", 12).endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 12).endObject());
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get(); .get();
@ -214,7 +214,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("6")); assertThat(response.getHits().getAt(0).getId(), equalTo("6"));
assertThat(response.getHits().getAt(1).getId(), equalTo("4")); assertThat(response.getHits().getAt(1).getId(), equalTo("4"));
source = jsonBuilder().startObject().field("field2", 11).endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field2", 11).endObject());
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get(); .get();
@ -222,7 +222,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("4")); assertThat(response.getHits().getAt(0).getId(), equalTo("4"));
// Test IP range: // Test IP range:
source = jsonBuilder().startObject().field("field3", "192.168.1.5").endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.5").endObject());
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get(); .get();
@ -230,7 +230,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("9")); assertThat(response.getHits().getAt(0).getId(), equalTo("9"));
assertThat(response.getHits().getAt(1).getId(), equalTo("7")); assertThat(response.getHits().getAt(1).getId(), equalTo("7"));
source = jsonBuilder().startObject().field("field3", "192.168.1.4").endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field3", "192.168.1.4").endObject());
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get(); .get();
@ -238,7 +238,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(response.getHits().getAt(0).getId(), equalTo("7")); assertThat(response.getHits().getAt(0).getId(), equalTo("7"));
// Test date range: // Test date range:
source = jsonBuilder().startObject().field("field4", "2016-05-15").endObject().bytes(); source = BytesReference.bytes(jsonBuilder().startObject().field("field4", "2016-05-15").endObject());
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.get(); .get();
@ -267,9 +267,9 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.endObject()).get(); .endObject()).get();
refresh(); refresh();
BytesReference source = jsonBuilder().startObject() BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
.startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject() .startObject("field1").field("lat", 52.20).field("lon", 4.51).endObject()
.endObject().bytes(); .endObject());
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.addSort("_id", SortOrder.ASC) .addSort("_id", SortOrder.ASC)
@ -402,10 +402,10 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.get(); .get();
client().admin().indices().prepareRefresh().get(); client().admin().indices().prepareRefresh().get();
BytesReference source = jsonBuilder().startObject() BytesReference source = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "the quick brown fox jumps over the lazy dog") .field("field1", "the quick brown fox jumps over the lazy dog")
.field("field2", "the quick brown fox falls down into the well") .field("field2", "the quick brown fox falls down into the well")
.endObject().bytes(); .endObject());
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", source, XContentType.JSON))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
@ -449,9 +449,9 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.execute().actionGet(); .execute().actionGet();
client().admin().indices().prepareRefresh().get(); client().admin().indices().prepareRefresh().get();
BytesReference document = jsonBuilder().startObject() BytesReference document = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "The quick brown fox jumps over the lazy dog") .field("field1", "The quick brown fox jumps over the lazy dog")
.endObject().bytes(); .endObject());
SearchResponse searchResponse = client().prepareSearch() SearchResponse searchResponse = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", document, XContentType.JSON))
.highlighter(new HighlightBuilder().field("field1")) .highlighter(new HighlightBuilder().field("field1"))
@ -470,12 +470,12 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(), assertThat(searchResponse.getHits().getAt(4).getHighlightFields().get("field1").fragments()[0].string(),
equalTo("The quick brown <em>fox</em> jumps over the lazy dog")); equalTo("The quick brown <em>fox</em> jumps over the lazy dog"));
BytesReference document1 = jsonBuilder().startObject() BytesReference document1 = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "The quick brown fox jumps") .field("field1", "The quick brown fox jumps")
.endObject().bytes(); .endObject());
BytesReference document2 = jsonBuilder().startObject() BytesReference document2 = BytesReference.bytes(jsonBuilder().startObject()
.field("field1", "over the lazy dog") .field("field1", "over the lazy dog")
.endObject().bytes(); .endObject());
searchResponse = client().prepareSearch() searchResponse = client().prepareSearch()
.setQuery(boolQuery() .setQuery(boolQuery()
.should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1")) .should(new PercolateQueryBuilder("query", document1, XContentType.JSON).setName("query1"))
@ -500,10 +500,10 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch() searchResponse = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", Arrays.asList( .setQuery(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "dog").endObject().bytes(), BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()),
jsonBuilder().startObject().field("field1", "fox").endObject().bytes(), BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject()),
jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(), BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()),
jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes() BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject())
), XContentType.JSON)) ), XContentType.JSON))
.highlighter(new HighlightBuilder().field("field1")) .highlighter(new HighlightBuilder().field("field1"))
.addSort("_uid", SortOrder.ASC) .addSort("_uid", SortOrder.ASC)
@ -537,12 +537,12 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
searchResponse = client().prepareSearch() searchResponse = client().prepareSearch()
.setQuery(boolQuery() .setQuery(boolQuery()
.should(new PercolateQueryBuilder("query", Arrays.asList( .should(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "dog").endObject().bytes(), BytesReference.bytes(jsonBuilder().startObject().field("field1", "dog").endObject()),
jsonBuilder().startObject().field("field1", "fox").endObject().bytes() BytesReference.bytes(jsonBuilder().startObject().field("field1", "fox").endObject())
), XContentType.JSON).setName("query1")) ), XContentType.JSON).setName("query1"))
.should(new PercolateQueryBuilder("query", Arrays.asList( .should(new PercolateQueryBuilder("query", Arrays.asList(
jsonBuilder().startObject().field("field1", "jumps").endObject().bytes(), BytesReference.bytes(jsonBuilder().startObject().field("field1", "jumps").endObject()),
jsonBuilder().startObject().field("field1", "brown fox").endObject().bytes() BytesReference.bytes(jsonBuilder().startObject().field("field1", "brown fox").endObject())
), XContentType.JSON).setName("query2")) ), XContentType.JSON).setName("query2"))
) )
.highlighter(new HighlightBuilder().field("field1")) .highlighter(new HighlightBuilder().field("field1"))
@ -664,7 +664,7 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
.get(); .get();
client().admin().indices().prepareRefresh().get(); client().admin().indices().prepareRefresh().get();
BytesReference source = jsonBuilder().startObject().field("field", "value").endObject().bytes(); BytesReference source = BytesReference.bytes(jsonBuilder().startObject().field("field", "value").endObject());
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON)) .setQuery(new PercolateQueryBuilder(queryFieldName, source, XContentType.JSON))
.setIndices("test1") .setIndices("test1")
@ -718,13 +718,13 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder() BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark") .startObject().field("companyname", "stark")
.startArray("employee") .startArray("employee")
.startObject().field("name", "virginia potts").endObject() .startObject().field("name", "virginia potts").endObject()
.startObject().field("name", "tony stark").endObject() .startObject().field("name", "tony stark").endObject()
.endArray() .endArray()
.endObject().bytes(), XContentType.JSON)) .endObject()), XContentType.JSON))
.addSort("_doc", SortOrder.ASC) .addSort("_doc", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);
@ -732,20 +732,20 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder() BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "notstark") .startObject().field("companyname", "notstark")
.startArray("employee") .startArray("employee")
.startObject().field("name", "virginia stark").endObject() .startObject().field("name", "virginia stark").endObject()
.startObject().field("name", "tony stark").endObject() .startObject().field("name", "tony stark").endObject()
.endArray() .endArray()
.endObject().bytes(), XContentType.JSON)) .endObject()), XContentType.JSON))
.addSort("_doc", SortOrder.ASC) .addSort("_doc", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 0); assertHitCount(response, 0);
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject().bytes(), BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field("companyname", "notstark").endObject()),
XContentType.JSON)) XContentType.JSON))
.addSort("_doc", SortOrder.ASC) .addSort("_doc", SortOrder.ASC)
.get(); .get();
@ -753,20 +753,20 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
response = client().prepareSearch() response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", Arrays.asList( .setQuery(new PercolateQueryBuilder("query", Arrays.asList(
XContentFactory.jsonBuilder() BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark") .startObject().field("companyname", "stark")
.startArray("employee") .startArray("employee")
.startObject().field("name", "virginia potts").endObject() .startObject().field("name", "virginia potts").endObject()
.startObject().field("name", "tony stark").endObject() .startObject().field("name", "tony stark").endObject()
.endArray() .endArray()
.endObject().bytes(), .endObject()),
XContentFactory.jsonBuilder() BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark") .startObject().field("companyname", "stark")
.startArray("employee") .startArray("employee")
.startObject().field("name", "peter parker").endObject() .startObject().field("name", "peter parker").endObject()
.startObject().field("name", "virginia potts").endObject() .startObject().field("name", "virginia potts").endObject()
.endArray() .endArray()
.endObject().bytes() .endObject())
), XContentType.JSON)) ), XContentType.JSON))
.addSort("_doc", SortOrder.ASC) .addSort("_doc", SortOrder.ASC)
.get(); .get();
@ -803,16 +803,16 @@ public class PercolatorQuerySearchIT extends ESIntegTestCase {
MultiSearchResponse response = client().prepareMultiSearch() MultiSearchResponse response = client().prepareMultiSearch()
.add(client().prepareSearch("test") .add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
jsonBuilder().startObject().field("field1", "b").endObject().bytes(), XContentType.JSON))) BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()), XContentType.JSON)))
.add(client().prepareSearch("test") .add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
yamlBuilder().startObject().field("field1", "c").endObject().bytes(), XContentType.YAML))) BytesReference.bytes(yamlBuilder().startObject().field("field1", "c").endObject()), XContentType.YAML)))
.add(client().prepareSearch("test") .add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
smileBuilder().startObject().field("field1", "b c").endObject().bytes(), XContentType.SMILE))) BytesReference.bytes(smileBuilder().startObject().field("field1", "b c").endObject()), XContentType.SMILE)))
.add(client().prepareSearch("test") .add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
jsonBuilder().startObject().field("field1", "d").endObject().bytes(), XContentType.JSON))) BytesReference.bytes(jsonBuilder().startObject().field("field1", "d").endObject()), XContentType.JSON)))
.add(client().prepareSearch("test") .add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null))) .setQuery(new PercolateQueryBuilder("query", "test", "type", "5", null, null, null)))
.add(client().prepareSearch("test") // non existing doc, so error element .add(client().prepareSearch("test") // non existing doc, so error element

@ -21,6 +21,7 @@ package org.elasticsearch.percolator;
import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.join.ScoreMode;
import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.WriteRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -79,7 +80,7 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE)
.execute().actionGet(); .execute().actionGet();
SearchResponse response = client().prepareSearch("index") SearchResponse response = client().prepareSearch("index")
.setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "b").endObject().bytes(), .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(jsonBuilder().startObject().field("field1", "b").endObject()),
XContentType.JSON)) XContentType.JSON))
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);
@ -108,13 +109,13 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
for (int i = 0; i < 32; i++) { for (int i = 0; i < 32; i++) {
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", .setQuery(new PercolateQueryBuilder("query",
XContentFactory.jsonBuilder() BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().field("companyname", "stark") .startObject().field("companyname", "stark")
.startArray("employee") .startArray("employee")
.startObject().field("name", "virginia potts").endObject() .startObject().field("name", "virginia potts").endObject()
.startObject().field("name", "tony stark").endObject() .startObject().field("name", "tony stark").endObject()
.endArray() .endArray()
.endObject().bytes(), XContentType.JSON)) .endObject()), XContentType.JSON))
.addSort("_doc", SortOrder.ASC) .addSort("_doc", SortOrder.ASC)
// size 0, because other wise load bitsets for normal document in FetchPhase#findRootDocumentIfNested(...) // size 0, because other wise load bitsets for normal document in FetchPhase#findRootDocumentIfNested(...)
.setSize(0) .setSize(0)
@ -192,7 +193,7 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
doc.endObject(); doc.endObject();
for (int i = 0; i < 32; i++) { for (int i = 0; i < 32; i++) {
SearchResponse response = client().prepareSearch() SearchResponse response = client().prepareSearch()
.setQuery(new PercolateQueryBuilder("query", doc.bytes(), XContentType.JSON)) .setQuery(new PercolateQueryBuilder("query", BytesReference.bytes(doc), XContentType.JSON))
.addSort("_doc", SortOrder.ASC) .addSort("_doc", SortOrder.ASC)
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);
@ -212,8 +213,9 @@ public class PercolatorQuerySearchTests extends ESSingleNodeTestCase {
client().admin().indices().prepareRefresh().get(); client().admin().indices().prepareRefresh().get();
SearchResponse response = client().prepareSearch("test") SearchResponse response = client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query", jsonBuilder().startObject().field("field1", "value").endObject().bytes(), .setQuery(new PercolateQueryBuilder("query",
XContentType.JSON)) BytesReference.bytes(jsonBuilder().startObject().field("field1", "value").endObject()),
XContentType.JSON))
.get(); .get();
assertHitCount(response, 1); assertHitCount(response, 1);
assertSearchHits(response, "1"); assertSearchHits(response, "1");

@ -152,7 +152,7 @@ public class RankEvalResponseTests extends ESTestCase {
RankEvalResponse response = new RankEvalResponse(0.123, Collections.singletonMap("coffee_query", coffeeQueryQuality), RankEvalResponse response = new RankEvalResponse(0.123, Collections.singletonMap("coffee_query", coffeeQueryQuality),
Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg"))); Collections.singletonMap("beer_query", new ParsingException(new XContentLocation(0, 0), "someMsg")));
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
String xContent = response.toXContent(builder, ToXContent.EMPTY_PARAMS).bytes().utf8ToString(); String xContent = BytesReference.bytes(response.toXContent(builder, ToXContent.EMPTY_PARAMS)).utf8ToString();
assertEquals(("{" + assertEquals(("{" +
" \"quality_level\": 0.123," + " \"quality_level\": 0.123," +
" \"details\": {" + " \"details\": {" +

@ -19,6 +19,7 @@
package org.elasticsearch.index.rankeval; package org.elasticsearch.index.rankeval;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@ -85,7 +86,7 @@ public class RankEvalSpecTests extends ESTestCase {
builder.startObject(); builder.startObject();
builder.field("field", randomAlphaOfLengthBetween(1, 5)); builder.field("field", randomAlphaOfLengthBetween(1, 5));
builder.endObject(); builder.endObject();
script = builder.string(); script = Strings.toString(builder);
} }
templates = new HashSet<>(); templates = new HashSet<>();
@ -115,7 +116,7 @@ public class RankEvalSpecTests extends ESTestCase {
public void testXContentRoundtrip() throws IOException { public void testXContentRoundtrip() throws IOException {
RankEvalSpec testItem = createTestItem(); RankEvalSpec testItem = createTestItem();
XContentBuilder shuffled = shuffleXContent(testItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS)); XContentBuilder shuffled = shuffleXContent(testItem.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, shuffled.bytes())) { try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(shuffled))) {
RankEvalSpec parsedItem = RankEvalSpec.parse(parser); RankEvalSpec parsedItem = RankEvalSpec.parse(parser);
// indices, come from URL parameters, so they don't survive xContent roundtrip // indices, come from URL parameters, so they don't survive xContent roundtrip

@ -21,8 +21,8 @@ package org.elasticsearch.index.reindex;
import org.elasticsearch.action.GenericAction; import org.elasticsearch.action.GenericAction;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser;
@ -91,7 +91,7 @@ public abstract class AbstractBulkByQueryRestHandler<
} }
} }
return parser.contentType().xContent().createParser(parser.getXContentRegistry(), return parser.contentType().xContent().createParser(parser.getXContentRegistry(),
parser.getDeprecationHandler(), builder.map(body).bytes().streamInput()); parser.getDeprecationHandler(), BytesReference.bytes(builder.map(body)).streamInput());
} }
} }
} }

@ -27,7 +27,6 @@ import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ObjectParser.ValueType;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
@ -75,7 +74,7 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
request.setRemoteInfo(buildRemoteInfo(source)); request.setRemoteInfo(buildRemoteInfo(source));
XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType()); XContentBuilder builder = XContentFactory.contentBuilder(parser.contentType());
builder.map(source); builder.map(source);
try (InputStream stream = builder.bytes().streamInput(); try (InputStream stream = BytesReference.bytes(builder).streamInput();
XContentParser innerParser = parser.contentType().xContent() XContentParser innerParser = parser.contentType().xContent()
.createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), stream)) { .createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), stream)) {
request.getSearchRequest().source().parseXContent(innerParser); request.getSearchRequest().source().parseXContent(innerParser);
@ -214,13 +213,13 @@ public class RestReindexAction extends AbstractBaseReindexRestHandler<ReindexReq
XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint(); XContentBuilder builder = JsonXContent.contentBuilder().prettyPrint();
Object query = source.remove("query"); Object query = source.remove("query");
if (query == null) { if (query == null) {
return matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS).bytes(); return BytesReference.bytes(matchAllQuery().toXContent(builder, ToXContent.EMPTY_PARAMS));
} }
if (!(query instanceof Map)) { if (!(query instanceof Map)) {
throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]"); throw new IllegalArgumentException("Expected [query] to be an object but was [" + query + "]");
} }
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
Map<String, Object> map = (Map<String, Object>) query; Map<String, Object> map = (Map<String, Object>) query;
return builder.map(map).bytes(); return BytesReference.bytes(builder.map(map));
} }
} }

@ -37,8 +37,8 @@ import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.bulk.BackoffPolicy; import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkItemResponse.Failure; import org.elasticsearch.action.bulk.BulkItemResponse.Failure;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure; import org.elasticsearch.index.reindex.ScrollableHitSource.SearchFailure;
import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchRequest;
@ -345,7 +345,7 @@ public class TransportReindexAction extends HandledTransportAction<ReindexReques
XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent())) { XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent())) {
parser.nextToken(); parser.nextToken();
builder.copyCurrentStructure(parser); builder.copyCurrentStructure(parser);
index.source(builder.bytes(), builder.contentType()); index.source(BytesReference.bytes(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new UncheckedIOException("failed to convert hit from " + sourceXContentType + " to " throw new UncheckedIOException("failed to convert hit from " + sourceXContentType + " to "
+ mainRequestXContentType, e); + mainRequestXContentType, e);

@ -155,7 +155,7 @@ final class RemoteRequestBuilders {
} }
entity.endObject(); entity.endObject();
BytesRef bytes = entity.bytes().toBytesRef(); BytesRef bytes = BytesReference.bytes(entity).toBytesRef();
return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, ContentType.APPLICATION_JSON); return new ByteArrayEntity(bytes.bytes, bytes.offset, bytes.length, ContentType.APPLICATION_JSON);
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchException("unexpected error building entity", e); throw new ElasticsearchException("unexpected error building entity", e);
@ -209,9 +209,9 @@ final class RemoteRequestBuilders {
return new StringEntity(scroll, ContentType.TEXT_PLAIN); return new StringEntity(scroll, ContentType.TEXT_PLAIN);
} }
try (XContentBuilder entity = JsonXContent.contentBuilder()) { try (XContentBuilder entity = JsonXContent.contentBuilder()) {
return new StringEntity(entity.startObject() return new StringEntity(Strings.toString(entity.startObject()
.field("scroll_id", scroll) .field("scroll_id", scroll)
.endObject().string(), ContentType.APPLICATION_JSON); .endObject()), ContentType.APPLICATION_JSON);
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchException("failed to build scroll entity", e); throw new ElasticsearchException("failed to build scroll entity", e);
} }
@ -223,9 +223,9 @@ final class RemoteRequestBuilders {
return new StringEntity(scroll, ContentType.TEXT_PLAIN); return new StringEntity(scroll, ContentType.TEXT_PLAIN);
} }
try (XContentBuilder entity = JsonXContent.contentBuilder()) { try (XContentBuilder entity = JsonXContent.contentBuilder()) {
return new StringEntity(entity.startObject() return new StringEntity(Strings.toString(entity.startObject()
.array("scroll_id", scroll) .array("scroll_id", scroll)
.endObject().string(), ContentType.APPLICATION_JSON); .endObject()), ContentType.APPLICATION_JSON);
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchException("failed to build clear scroll entity", e); throw new ElasticsearchException("failed to build clear scroll entity", e);
} }

@ -20,6 +20,7 @@
package org.elasticsearch.index.reindex.remote; package org.elasticsearch.index.reindex.remote;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.index.reindex.ScrollableHitSource.BasicHit; import org.elasticsearch.index.reindex.ScrollableHitSource.BasicHit;
import org.elasticsearch.index.reindex.ScrollableHitSource.Hit; import org.elasticsearch.index.reindex.ScrollableHitSource.Hit;
import org.elasticsearch.index.reindex.ScrollableHitSource.Response; import org.elasticsearch.index.reindex.ScrollableHitSource.Response;
@ -78,7 +79,7 @@ final class RemoteResponseParsers {
try (XContentBuilder b = XContentBuilder.builder(s.xContent())) { try (XContentBuilder b = XContentBuilder.builder(s.xContent())) {
b.copyCurrentStructure(p); b.copyCurrentStructure(p);
// a hack but this lets us get the right xcontent type to go with the source // a hack but this lets us get the right xcontent type to go with the source
return new Tuple<>(b.bytes(), s); return new Tuple<>(BytesReference.bytes(b), s);
} }
} catch (IOException e) { } catch (IOException e) {
throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e); throw new ParsingException(p.getTokenLocation(), "[hit] failed to parse [_source]", e);

@ -118,7 +118,7 @@ public class RestReindexActionTests extends ESTestCase {
b.endObject(); b.endObject();
} }
b.endObject(); b.endObject();
request = b.bytes(); request = BytesReference.bytes(b);
} }
try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) { try (XContentParser p = createParser(JsonXContent.jsonXContent, request)) {
ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest()); ReindexRequest r = new ReindexRequest(new SearchRequest(), new IndexRequest());
@ -144,7 +144,7 @@ public class RestReindexActionTests extends ESTestCase {
body.endObject(); body.endObject();
} }
body.endObject(); body.endObject();
request.withContent(body.bytes(), body.contentType()); request.withContent(BytesReference.bytes(body), body.contentType());
} }
request.withParams(singletonMap("pipeline", "doesn't matter")); request.withParams(singletonMap("pipeline", "doesn't matter"));
Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build())); Exception e = expectThrows(IllegalArgumentException.class, () -> action.buildRequest(request.build()));

@ -22,6 +22,7 @@ package org.elasticsearch.rest;
import org.apache.http.entity.ContentType; import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity; import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Response; import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ESRestTestCase;
import org.hamcrest.Matcher; import org.hamcrest.Matcher;
@ -57,7 +58,7 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
} }
builder.endObject(); builder.endObject();
client().performRequest("PUT", "/" + indexName + "/" + typeName + "/" + "1", emptyMap(), client().performRequest("PUT", "/" + indexName + "/" + typeName + "/" + "1", emptyMap(),
new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON));
} }
} }
@ -108,7 +109,8 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
} }
builder.endObject(); builder.endObject();
client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(Strings.toString(builder),
ContentType.APPLICATION_JSON));
headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0)); headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0));
headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0)); headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0));
} }
@ -134,7 +136,7 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
builder.endObject(); builder.endObject();
client().performRequest("PUT", "/_template/template", emptyMap(), client().performRequest("PUT", "/_template/template", emptyMap(),
new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON));
headTestCase("/_template/template", emptyMap(), greaterThan(0)); headTestCase("/_template/template", emptyMap(), greaterThan(0));
} }
} }
@ -162,7 +164,8 @@ public class Netty4HeadBodyIsEmptyIT extends ESRestTestCase {
builder.endObject(); builder.endObject();
} }
builder.endObject(); builder.endObject();
client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(builder.string(), ContentType.APPLICATION_JSON)); client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(Strings.toString(builder),
ContentType.APPLICATION_JSON));
createTestDoc("test-no-source", "test-no-source"); createTestDoc("test-no-source", "test-no-source");
headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), equalTo(0)); headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), equalTo(0));
} }

@ -29,6 +29,8 @@ import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.index.IndexableFieldType;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -64,19 +66,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testDefaults() throws Exception { public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "1234") .startObject()
.endObject() .field("field", "1234")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -108,19 +110,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
indexService = createIndex("oldindex", Settings.builder().put("index.version.created", Version.V_5_5_0).build()); indexService = createIndex("oldindex", Settings.builder().put("index.version.created", Version.V_5_5_0).build());
parser = indexService.mapperService().documentMapperParser(); parser = indexService.mapperService().documentMapperParser();
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "1234") .startObject()
.endObject() .field("field", "1234")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -149,44 +151,44 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testNullValue() throws IOException { public void testNullValue() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.nullField("field") .startObject()
.endObject() .nullField("field")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field")); assertArrayEquals(new IndexableField[0], doc.rootDoc().getFields("field"));
mapping = XContentFactory.jsonBuilder().startObject().startObject("type") mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE) .startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("null_value", "1234").endObject().endObject() .field("null_value", "1234").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
mapper = parser.parse("type", new CompressedXContent(mapping)); mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.endObject() .startObject()
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
assertEquals(0, fields.length); assertEquals(0, fields.length);
doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.nullField("field") .startObject()
.endObject() .nullField("field")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
Collator collator = Collator.getInstance(ULocale.ROOT); Collator collator = Collator.getInstance(ULocale.ROOT);
@ -199,20 +201,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testEnableStore() throws IOException { public void testEnableStore() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE) .startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("store", true).endObject().endObject() .field("store", true).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "1234") .startObject()
.endObject() .field("field", "1234")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -221,20 +223,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testDisableIndex() throws IOException { public void testDisableIndex() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE) .startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index", false).endObject().endObject() .field("index", false).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "1234") .startObject()
.endObject() .field("field", "1234")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -244,20 +246,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testDisableDocValues() throws IOException { public void testDisableDocValues() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE) .startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("doc_values", false).endObject().endObject() .field("doc_values", false).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "1234") .startObject()
.endObject() .field("field", "1234")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -266,19 +268,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testMultipleValues() throws IOException { public void testMultipleValues() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", Arrays.asList("1234", "5678")) .startObject()
.endObject() .field("field", Arrays.asList("1234", "5678"))
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -328,20 +330,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testIndexOptions() throws IOException { public void testIndexOptions() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE) .startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index_options", "freqs").endObject().endObject() .field("index_options", "freqs").endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "1234") .startObject()
.endObject() .field("field", "1234")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -349,10 +351,10 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions()); assertEquals(IndexOptions.DOCS_AND_FREQS, fields[0].fieldType().indexOptions());
for (String indexOptions : Arrays.asList("positions", "offsets")) { for (String indexOptions : Arrays.asList("positions", "offsets")) {
final String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") final String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE) .startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("index_options", indexOptions).endObject().endObject() .field("index_options", indexOptions).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping2))); () -> parser.parse("type", new CompressedXContent(mapping2)));
assertEquals("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions, assertEquals("The [" + FIELD_TYPE + "] field does not support positions, got [index_options]=" + indexOptions,
@ -361,20 +363,20 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testEnableNorms() throws IOException { public void testEnableNorms() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field").field("type", FIELD_TYPE) .startObject("properties").startObject("field").field("type", FIELD_TYPE)
.field("norms", true).endObject().endObject() .field("norms", true).endObject().endObject()
.endObject().endObject().string(); .endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "1234") .startObject()
.endObject() .field("field", "1234")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = doc.rootDoc().getFields("field"); IndexableField[] fields = doc.rootDoc().getFields("field");
@ -383,22 +385,22 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testCollator() throws IOException { public void testCollator() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", FIELD_TYPE) .field("type", FIELD_TYPE)
.field("language", "tr") .field("language", "tr")
.field("strength", "primary") .field("strength", "primary")
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
assertEquals(mapping, mapper.mappingSource().toString()); assertEquals(mapping, mapper.mappingSource().toString());
ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument doc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "I WİLL USE TURKİSH CASING") .startObject()
.endObject() .field("field", "I WİLL USE TURKİSH CASING")
.bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
Collator collator = Collator.getInstance(new ULocale("tr")); Collator collator = Collator.getInstance(new ULocale("tr"));
@ -428,19 +430,19 @@ public class ICUCollationKeywordFieldMapperTests extends ESSingleNodeTestCase {
} }
public void testUpdateCollator() throws IOException { public void testUpdateCollator() throws IOException {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", FIELD_TYPE) .field("type", FIELD_TYPE)
.field("language", "tr") .field("language", "tr")
.field("strength", "primary") .field("strength", "primary")
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE); indexService.mapperService().merge("type", new CompressedXContent(mapping), MergeReason.MAPPING_UPDATE);
String mapping2 = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping2 = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", FIELD_TYPE) .field("type", FIELD_TYPE)
.field("language", "en") .field("language", "en")
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> indexService.mapperService().merge("type", () -> indexService.mapperService().merge("type",
new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE)); new CompressedXContent(mapping2), MergeReason.MAPPING_UPDATE));

@ -22,6 +22,8 @@ package org.elasticsearch.index.mapper.murmur3;
import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexOptions;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
@ -71,15 +73,15 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
} }
public void testDefaults() throws Exception { public void testDefaults() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", "murmur3") .field("type", "murmur3")
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping));
ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.jsonBuilder() ParsedDocument parsedDoc = mapper.parse(SourceToParse.source("test", "type", "1", BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject() .startObject()
.field("field", "value") .field("field", "value")
.endObject().bytes(), .endObject()),
XContentType.JSON)); XContentType.JSON));
IndexableField[] fields = parsedDoc.rootDoc().getFields("field"); IndexableField[] fields = parsedDoc.rootDoc().getFields("field");
assertNotNull(fields); assertNotNull(fields);
@ -90,11 +92,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
} }
public void testDocValuesSettingNotAllowed() throws Exception { public void testDocValuesSettingNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", "murmur3") .field("type", "murmur3")
.field("doc_values", false) .field("doc_values", false)
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
try { try {
parser.parse("type", new CompressedXContent(mapping)); parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception"); fail("expected a mapper parsing exception");
@ -103,11 +105,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
} }
// even setting to the default is not allowed, the setting is invalid // even setting to the default is not allowed, the setting is invalid
mapping = XContentFactory.jsonBuilder().startObject().startObject("type") mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", "murmur3") .field("type", "murmur3")
.field("doc_values", true) .field("doc_values", true)
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
try { try {
parser.parse("type", new CompressedXContent(mapping)); parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception"); fail("expected a mapper parsing exception");
@ -117,11 +119,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
} }
public void testIndexSettingNotAllowed() throws Exception { public void testIndexSettingNotAllowed() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", "murmur3") .field("type", "murmur3")
.field("index", "not_analyzed") .field("index", "not_analyzed")
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
try { try {
parser.parse("type", new CompressedXContent(mapping)); parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception"); fail("expected a mapper parsing exception");
@ -130,11 +132,11 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
} }
// even setting to the default is not allowed, the setting is invalid // even setting to the default is not allowed, the setting is invalid
mapping = XContentFactory.jsonBuilder().startObject().startObject("type") mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("field") .startObject("properties").startObject("field")
.field("type", "murmur3") .field("type", "murmur3")
.field("index", "no") .field("index", "no")
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
try { try {
parser.parse("type", new CompressedXContent(mapping)); parser.parse("type", new CompressedXContent(mapping));
fail("expected a mapper parsing exception"); fail("expected a mapper parsing exception");
@ -144,10 +146,10 @@ public class Murmur3FieldMapperTests extends ESSingleNodeTestCase {
} }
public void testEmptyName() throws Exception { public void testEmptyName() throws Exception {
String mapping = XContentFactory.jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("properties").startObject("") .startObject("properties").startObject("")
.field("type", "murmur3") .field("type", "murmur3")
.endObject().endObject().endObject().endObject().string(); .endObject().endObject().endObject().endObject());
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
() -> parser.parse("type", new CompressedXContent(mapping)) () -> parser.parse("type", new CompressedXContent(mapping))

@ -21,8 +21,7 @@ package org.elasticsearch.index.mapper.size;
import java.util.Collection; import java.util.Collection;
import org.elasticsearch.Version; import org.elasticsearch.common.Strings;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -30,11 +29,9 @@ import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexService;
import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.DocumentMapper;
import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.plugin.mapper.MapperSizePlugin; import org.elasticsearch.plugin.mapper.MapperSizePlugin;
import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.ESSingleNodeTestCase;
@ -42,7 +39,6 @@ import org.elasticsearch.test.InternalSettingsPlugin;
import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.instanceOf;
import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableField;
@ -56,11 +52,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true"); IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=true");
DocumentMapper docMapper = service.mapperService().documentMapper("type"); DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder() BytesReference source = BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "value") .startObject()
.endObject() .field("field", "value")
.bytes(); .endObject());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
boolean stored = false; boolean stored = false;
@ -77,11 +73,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false"); IndexService service = createIndex("test", Settings.EMPTY, "type", "_size", "enabled=false");
DocumentMapper docMapper = service.mapperService().documentMapper("type"); DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder() BytesReference source = BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "value") .startObject()
.endObject() .field("field", "value")
.bytes(); .endObject());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
assertThat(doc.rootDoc().getField("_size"), nullValue()); assertThat(doc.rootDoc().getField("_size"), nullValue());
@ -91,11 +87,11 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
IndexService service = createIndex("test", Settings.EMPTY, "type"); IndexService service = createIndex("test", Settings.EMPTY, "type");
DocumentMapper docMapper = service.mapperService().documentMapper("type"); DocumentMapper docMapper = service.mapperService().documentMapper("type");
BytesReference source = XContentFactory.jsonBuilder() BytesReference source = BytesReference
.startObject() .bytes(XContentFactory.jsonBuilder()
.field("field", "value") .startObject()
.endObject() .field("field", "value")
.bytes(); .endObject());
ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON)); ParsedDocument doc = docMapper.parse(SourceToParse.source("test", "type", "1", source, XContentType.JSON));
assertThat(doc.rootDoc().getField("_size"), nullValue()); assertThat(doc.rootDoc().getField("_size"), nullValue());
@ -106,9 +102,9 @@ public class SizeMappingTests extends ESSingleNodeTestCase {
DocumentMapper docMapper = service.mapperService().documentMapper("type"); DocumentMapper docMapper = service.mapperService().documentMapper("type");
assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true)); assertThat(docMapper.metadataMapper(SizeFieldMapper.class).enabled(), is(true));
String disabledMapping = XContentFactory.jsonBuilder().startObject().startObject("type") String disabledMapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type")
.startObject("_size").field("enabled", false).endObject() .startObject("_size").field("enabled", false).endObject()
.endObject().endObject().string(); .endObject().endObject());
docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping), docMapper = service.mapperService().merge("type", new CompressedXContent(disabledMapping),
MapperService.MergeReason.MAPPING_UPDATE); MapperService.MergeReason.MAPPING_UPDATE);

@ -19,6 +19,7 @@
package org.elasticsearch.repositories.gcs; package org.elasticsearch.repositories.gcs;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.path.PathTrie;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -522,7 +523,7 @@ public class GoogleCloudStorageTestServer {
*/ */
private static Response newResponse(final RestStatus status, final Map<String, String> headers, final XContentBuilder xContentBuilder) { private static Response newResponse(final RestStatus status, final Map<String, String> headers, final XContentBuilder xContentBuilder) {
try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { try (ByteArrayOutputStream out = new ByteArrayOutputStream()) {
xContentBuilder.bytes().writeTo(out); BytesReference.bytes(xContentBuilder).writeTo(out);
return new Response(status, headers, XContentType.JSON.mediaType(), out.toByteArray()); return new Response(status, headers, XContentType.JSON.mediaType(), out.toByteArray());
} catch (IOException e) { } catch (IOException e) {
return newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage()); return newError(RestStatus.INTERNAL_SERVER_ERROR, e.getMessage());
@ -548,7 +549,7 @@ public class GoogleCloudStorageTestServer {
.endArray() .endArray()
.endObject() .endObject()
.endObject(); .endObject();
builder.bytes().writeTo(out); BytesReference.bytes(builder).writeTo(out);
} }
return new Response(status, emptyMap(), XContentType.JSON.mediaType(), out.toByteArray()); return new Response(status, emptyMap(), XContentType.JSON.mediaType(), out.toByteArray());
} catch (IOException e) { } catch (IOException e) {

@ -43,6 +43,7 @@ import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
@ -306,7 +307,7 @@ public class CrossClusterSearchUnavailableClusterIT extends ESRestTestCase {
builder.endObject(); builder.endObject();
} }
builder.endObject(); builder.endObject();
requestBody = builder.string(); requestBody = Strings.toString(builder);
} }
return new NStringEntity(requestBody, ContentType.APPLICATION_JSON); return new NStringEntity(requestBody, ContentType.APPLICATION_JSON);
} }

@ -29,6 +29,7 @@ import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.CheckedFunction;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
@ -140,7 +141,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(), client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
count = randomIntBetween(2000, 3000); count = randomIntBetween(2000, 3000);
byte[] randomByteArray = new byte[16]; byte[] randomByteArray = new byte[16];
@ -203,7 +204,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(), client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
int numDocs = randomIntBetween(2000, 3000); int numDocs = randomIntBetween(2000, 3000);
indexRandomDocuments(numDocs, true, false, i -> { indexRandomDocuments(numDocs, true, false, i -> {
@ -280,7 +281,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(), client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
String aliasName = "%23" + index; // %23 == # String aliasName = "%23" + index; // %23 == #
client().performRequest("PUT", "/" + index + "/_alias/" + aliasName); client().performRequest("PUT", "/" + index + "/_alias/" + aliasName);
@ -328,7 +329,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(), client().performRequest("PUT", "/_template/template_1", Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
client().performRequest("PUT", "/" + index); client().performRequest("PUT", "/" + index);
} }
@ -379,7 +380,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(), client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
numDocs = randomIntBetween(512, 1024); numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> { indexRandomDocuments(numDocs, true, true, i -> {
@ -446,7 +447,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(), client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
numDocs = randomIntBetween(512, 1024); numDocs = randomIntBetween(512, 1024);
indexRandomDocuments(numDocs, true, true, i -> { indexRandomDocuments(numDocs, true, true, i -> {
@ -836,7 +837,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
templateBuilder.endObject().endObject(); templateBuilder.endObject().endObject();
client().performRequest("PUT", "/_template/test_template", emptyMap(), client().performRequest("PUT", "/_template/test_template", emptyMap(),
new StringEntity(templateBuilder.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(templateBuilder), ContentType.APPLICATION_JSON));
if (runningAgainstOldCluster) { if (runningAgainstOldCluster) {
// Create the repo // Create the repo
@ -850,7 +851,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
repoConfig.endObject(); repoConfig.endObject();
client().performRequest("PUT", "/_snapshot/repo", emptyMap(), client().performRequest("PUT", "/_snapshot/repo", emptyMap(),
new StringEntity(repoConfig.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(repoConfig), ContentType.APPLICATION_JSON));
} }
client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"), client().performRequest("PUT", "/_snapshot/repo/" + (runningAgainstOldCluster ? "old_snap" : "new_snap"),
@ -875,7 +876,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
client().performRequest("PUT", "/" + index, Collections.emptyMap(), client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
} else { } else {
Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards")); Response response = client().performRequest("GET", index + "/_stats", singletonMap("level", "shards"));
List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0"); List<Object> shardStats = ObjectPath.createFromResponse(response).evaluate("indices." + index + ".shards.0");
@ -919,7 +920,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
restoreCommand.field("rename_replacement", "restored_" + index); restoreCommand.field("rename_replacement", "restored_" + index);
restoreCommand.endObject(); restoreCommand.endObject();
client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"), client().performRequest("POST", "/_snapshot/repo/" + snapshotName + "/_restore", singletonMap("wait_for_completion", "true"),
new StringEntity(restoreCommand.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(restoreCommand), ContentType.APPLICATION_JSON));
// Make sure search finds all documents // Make sure search finds all documents
String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0"))); String countResponse = toStr(client().performRequest("GET", "/restored_" + index + "/_search", singletonMap("size", "0")));
@ -997,7 +998,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
for (int i = 0; i < count; i++) { for (int i = 0; i < count; i++) {
logger.debug("Indexing document [{}]", i); logger.debug("Indexing document [{}]", i);
client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(), client().performRequest("POST", "/" + index + "/doc/" + i, emptyMap(),
new StringEntity(docSupplier.apply(i).string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(docSupplier.apply(i)), ContentType.APPLICATION_JSON));
if (rarely()) { if (rarely()) {
refresh(); refresh();
} }
@ -1022,7 +1023,7 @@ public class FullClusterRestartIT extends ESRestTestCase {
// Only create the first version so we know how many documents are created when the index is first created // Only create the first version so we know how many documents are created when the index is first created
Map<String, String> params = singletonMap("op_type", "create"); Map<String, String> params = singletonMap("op_type", "create");
client().performRequest("PUT", "/info/doc/" + index + "_" + type, params, client().performRequest("PUT", "/info/doc/" + index + "_" + type, params,
new StringEntity(infoDoc.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(infoDoc), ContentType.APPLICATION_JSON));
} }
private String loadInfoDocument(String type) throws IOException { private String loadInfoDocument(String type) throws IOException {

@ -25,6 +25,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.client.Response; import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestClient;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SeqNoStats;
@ -42,7 +43,6 @@ import java.util.stream.Collectors;
import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength; import static com.carrotsearch.randomizedtesting.RandomizedTest.randomAsciiOfLength;
import static java.util.Collections.emptyMap; import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap; import static java.util.Collections.singletonMap;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.not;
@ -237,15 +237,15 @@ public class IndexingIT extends ESRestTestCase {
logger.info("cluster discovered: {}", nodes.toString()); logger.info("cluster discovered: {}", nodes.toString());
// Create the repository before taking the snapshot. // Create the repository before taking the snapshot.
String repoConfig = JsonXContent.contentBuilder() String repoConfig = Strings
.startObject() .toString(JsonXContent.contentBuilder()
.field("type", "fs") .startObject()
.startObject("settings") .field("type", "fs")
.field("compress", randomBoolean()) .startObject("settings")
.field("location", System.getProperty("tests.path.repo")) .field("compress", randomBoolean())
.endObject() .field("location", System.getProperty("tests.path.repo"))
.endObject() .endObject()
.string(); .endObject());
assertOK( assertOK(
client().performRequest("PUT", "/_snapshot/repo", emptyMap(), client().performRequest("PUT", "/_snapshot/repo", emptyMap(),

@ -25,6 +25,7 @@ import org.apache.http.util.EntityUtils;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.client.Response; import org.elasticsearch.client.Response;
import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
@ -189,7 +190,7 @@ public class QueryBuilderBWCIT extends ESRestTestCase {
} }
mappingsAndSettings.endObject(); mappingsAndSettings.endObject();
Response rsp = client().performRequest("PUT", "/" + index, Collections.emptyMap(), Response rsp = client().performRequest("PUT", "/" + index, Collections.emptyMap(),
new StringEntity(mappingsAndSettings.string(), ContentType.APPLICATION_JSON)); new StringEntity(Strings.toString(mappingsAndSettings), ContentType.APPLICATION_JSON));
assertEquals(200, rsp.getStatusLine().getStatusCode()); assertEquals(200, rsp.getStatusLine().getStatusCode());
for (int i = 0; i < CANDIDATES.size(); i++) { for (int i = 0; i < CANDIDATES.size(); i++) {

@ -33,6 +33,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsRequest;
import org.elasticsearch.client.Client; import org.elasticsearch.client.Client;
import org.elasticsearch.client.Response; import org.elasticsearch.client.Response;
import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkModule;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -103,12 +104,12 @@ public class ContextAndHeaderTransportIT extends HttpSmokeTestCase {
@Before @Before
public void createIndices() throws Exception { public void createIndices() throws Exception {
String mapping = jsonBuilder().startObject().startObject("type") String mapping = Strings.toString(jsonBuilder().startObject().startObject("type")
.startObject("properties") .startObject("properties")
.startObject("location").field("type", "geo_shape").endObject() .startObject("location").field("type", "geo_shape").endObject()
.startObject("name").field("type", "text").endObject() .startObject("name").field("type", "text").endObject()
.endObject() .endObject()
.endObject().endObject().string(); .endObject().endObject());
Settings settings = Settings.builder() Settings settings = Settings.builder()
.put(indexSettings()) .put(indexSettings())

@ -23,6 +23,7 @@ import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType; import org.apache.http.entity.ContentType;
import org.apache.http.entity.StringEntity; import org.apache.http.entity.StringEntity;
import org.elasticsearch.client.Response; import org.elasticsearch.client.Response;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.common.logging.LoggerMessageFormat;
import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting;
@ -210,7 +211,7 @@ public class DeprecationHttpIT extends HttpSmokeTestCase {
builder.endArray().endObject(); builder.endArray().endObject();
return new StringEntity(builder.string(), ContentType.APPLICATION_JSON); return new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON);
} }
} }

@ -31,6 +31,7 @@ import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.Build; import org.elasticsearch.Build;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterModule; import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -76,7 +77,7 @@ public class WildflyIT extends LuceneTestCase {
builder.endArray(); builder.endArray();
} }
builder.endObject(); builder.endObject();
body = builder.string(); body = Strings.toString(builder);
} }
put.setEntity(new StringEntity(body, ContentType.APPLICATION_JSON)); put.setEntity(new StringEntity(body, ContentType.APPLICATION_JSON));
try (CloseableHttpResponse response = client.execute(put)) { try (CloseableHttpResponse response = client.execute(put)) {

@ -23,10 +23,10 @@ import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -130,7 +130,7 @@ public class NodesInfoResponse extends BaseNodesResponse<NodeInfo> implements To
builder.startObject(); builder.startObject();
toXContent(builder, EMPTY_PARAMS); toXContent(builder, EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}"; return "{ \"error\" : \"" + e.getMessage() + "\"}";
} }

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.cluster.node.stats;
import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -72,7 +73,7 @@ public class NodesStatsResponse extends BaseNodesResponse<NodeStats> implements
builder.startObject(); builder.startObject();
toXContent(builder, EMPTY_PARAMS); toXContent(builder, EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}"; return "{ \"error\" : \"" + e.getMessage() + "\"}";
} }

@ -22,9 +22,9 @@ package org.elasticsearch.action.admin.cluster.node.usage;
import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -77,10 +77,10 @@ public class NodesUsageResponse extends BaseNodesResponse<NodeUsage> implements
builder.startObject(); builder.startObject();
toXContent(builder, EMPTY_PARAMS); toXContent(builder, EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}"; return "{ \"error\" : \"" + e.getMessage() + "\"}";
} }
} }
} }

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.cluster.repositories.put;
import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -161,7 +162,7 @@ public class PutRepositoryRequest extends AcknowledgedRequest<PutRepositoryReque
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
settings(builder.string(), builder.contentType()); settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }

@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -76,7 +77,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
/** /**
* Sets the value of "flat_settings". * Sets the value of "flat_settings".
* Used only by the high-level REST client. * Used only by the high-level REST client.
* *
* @param flatSettings * @param flatSettings
* value of "flat_settings" flag to be set * value of "flat_settings" flag to be set
* @return this request * @return this request
@ -89,7 +90,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
/** /**
* Return settings in flat format. * Return settings in flat format.
* Used only by the high-level REST client. * Used only by the high-level REST client.
* *
* @return <code>true</code> if settings need to be returned in flat format; <code>false</code> otherwise. * @return <code>true</code> if settings need to be returned in flat format; <code>false</code> otherwise.
*/ */
public boolean flatSettings() { public boolean flatSettings() {
@ -136,7 +137,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
transientSettings(builder.string(), builder.contentType()); transientSettings(Strings.toString(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }
@ -175,7 +176,7 @@ public class ClusterUpdateSettingsRequest extends AcknowledgedRequest<ClusterUpd
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
persistentSettings(builder.string(), builder.contentType()); persistentSettings(Strings.toString(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }

@ -338,7 +338,7 @@ public class CreateSnapshotRequest extends MasterNodeRequest<CreateSnapshotReque
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
settings(builder.string(), builder.contentType()); settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }

@ -373,7 +373,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
settings(builder.string(), builder.contentType()); settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }
@ -485,7 +485,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest<RestoreSnapshotReq
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
indexSettings(builder.string(), builder.contentType()); indexSettings(Strings.toString(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }

@ -23,6 +23,7 @@ import org.elasticsearch.action.FailedNodeException;
import org.elasticsearch.action.support.nodes.BaseNodesResponse; import org.elasticsearch.action.support.nodes.BaseNodesResponse;
import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -130,7 +131,7 @@ public class ClusterStatsResponse extends BaseNodesResponse<ClusterStatsNodeResp
builder.startObject(); builder.startObject();
toXContent(builder, EMPTY_PARAMS); toXContent(builder, EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}"; return "{ \"error\" : \"" + e.getMessage() + "\"}";
} }

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.alias;
import org.elasticsearch.ElasticsearchGenerationException; import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -35,8 +36,8 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.Map; import java.util.Map;
import java.util.Objects;
/** /**
* Represents an alias, to be associated with an index * Represents an alias, to be associated with an index
@ -100,7 +101,7 @@ public class Alias implements Streamable, ToXContentObject {
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(filter); builder.map(filter);
this.filter = builder.string(); this.filter = Strings.toString(builder);
return this; return this;
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
@ -119,7 +120,7 @@ public class Alias implements Streamable, ToXContentObject {
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); filterBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close(); builder.close();
this.filter = builder.string(); this.filter = Strings.toString(builder);
return this; return this;
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e); throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
@ -227,7 +228,9 @@ public class Alias implements Streamable, ToXContentObject {
builder.startObject(name); builder.startObject(name);
if (filter != null) { if (filter != null) {
builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON); try (InputStream stream = new BytesArray(filter).streamInput()) {
builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON);
}
} }
if (indexRouting != null && indexRouting.equals(searchRouting)) { if (indexRouting != null && indexRouting.equals(searchRouting)) {

@ -44,6 +44,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilder;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
@ -377,7 +378,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(filter); builder.map(filter);
this.filter = builder.string(); this.filter = Strings.toString(builder);
return this; return this;
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + filter + "]", e);
@ -393,7 +394,7 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
filter.toXContent(builder, ToXContent.EMPTY_PARAMS); filter.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.close(); builder.close();
this.filter = builder.string(); this.filter = Strings.toString(builder);
return this; return this;
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e); throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
@ -432,7 +433,9 @@ public class IndicesAliasesRequest extends AcknowledgedRequest<IndicesAliasesReq
builder.array(ALIASES.getPreferredName(), aliases); builder.array(ALIASES.getPreferredName(), aliases);
} }
if (false == Strings.isEmpty(filter)) { if (false == Strings.isEmpty(filter)) {
builder.rawField(FILTER.getPreferredName(), new BytesArray(filter), XContentType.JSON); try (InputStream stream = new BytesArray(filter).streamInput()) {
builder.rawField(FILTER.getPreferredName(), stream, XContentType.JSON);
}
} }
if (false == Strings.isEmpty(routing)) { if (false == Strings.isEmpty(routing)) {
builder.field(ROUTING.getPreferredName(), routing); builder.field(ROUTING.getPreferredName(), routing);

@ -78,7 +78,7 @@ public class AnalyzeRequest extends SingleShardRequest<AnalyzeRequest> {
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(definition); builder.map(definition);
this.definition = Settings.builder().loadFromSource(builder.string(), builder.contentType()).build(); this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build();
} catch (IOException e) { } catch (IOException e) {
throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); throw new IllegalArgumentException("Failed to parse [" + definition + "]", e);
} }

@ -31,6 +31,7 @@ import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
@ -48,6 +49,7 @@ import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.io.UncheckedIOException; import java.io.UncheckedIOException;
import java.util.HashMap; import java.util.HashMap;
import java.util.HashSet; import java.util.HashSet;
@ -180,11 +182,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Allows to set the settings using a json builder. * Allows to set the settings using a json builder.
*/ */
public CreateIndexRequest settings(XContentBuilder builder) { public CreateIndexRequest settings(XContentBuilder builder) {
try { settings(Strings.toString(builder), builder.contentType());
settings(builder.string(), builder.contentType());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate json settings from builder", e);
}
return this; return this;
} }
@ -196,7 +194,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
settings(builder.string(), XContentType.JSON); settings(Strings.toString(builder), XContentType.JSON);
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }
@ -249,7 +247,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* @param source The mapping source * @param source The mapping source
*/ */
public CreateIndexRequest mapping(String type, XContentBuilder source) { public CreateIndexRequest mapping(String type, XContentBuilder source) {
return mapping(type, source.bytes(), source.contentType()); return mapping(type, BytesReference.bytes(source), source.contentType());
} }
/** /**
@ -293,7 +291,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
try { try {
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
builder.map(source); builder.map(source);
return aliases(builder.bytes()); return aliases(BytesReference.bytes(builder));
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }
@ -303,7 +301,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Sets the aliases that will be associated with the index when it gets created * Sets the aliases that will be associated with the index when it gets created
*/ */
public CreateIndexRequest aliases(XContentBuilder source) { public CreateIndexRequest aliases(XContentBuilder source) {
return aliases(source.bytes()); return aliases(BytesReference.bytes(source));
} }
/** /**
@ -350,7 +348,7 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
* Sets the settings and mappings as a single source. * Sets the settings and mappings as a single source.
*/ */
public CreateIndexRequest source(XContentBuilder source) { public CreateIndexRequest source(XContentBuilder source) {
return source(source.bytes(), source.contentType()); return source(BytesReference.bytes(source), source.contentType());
} }
/** /**
@ -536,7 +534,9 @@ public class CreateIndexRequest extends AcknowledgedRequest<CreateIndexRequest>
builder.startObject(MAPPINGS.getPreferredName()); builder.startObject(MAPPINGS.getPreferredName());
for (Map.Entry<String, String> entry : mappings.entrySet()) { for (Map.Entry<String, String> entry : mappings.entrySet()) {
builder.rawField(entry.getKey(), new BytesArray(entry.getValue()), XContentType.JSON); try (InputStream stream = new BytesArray(entry.getValue()).streamInput()) {
builder.rawField(entry.getKey(), stream, XContentType.JSON);
}
} }
builder.endObject(); builder.endObject();

@ -31,6 +31,7 @@ import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.Mapper;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
@ -127,7 +128,9 @@ public class GetFieldMappingsResponse extends ActionResponse implements ToXConte
if (params.paramAsBoolean("pretty", false)) { if (params.paramAsBoolean("pretty", false)) {
builder.field("mapping", sourceAsMap()); builder.field("mapping", sourceAsMap());
} else { } else {
builder.rawField("mapping", source, XContentType.JSON); try (InputStream stream = source.streamInput()) {
builder.rawField("mapping", stream, XContentType.JSON);
}
} }
return builder; return builder;
} }

@ -250,11 +250,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
* The mapping source definition. * The mapping source definition.
*/ */
public PutMappingRequest source(XContentBuilder mappingBuilder) { public PutMappingRequest source(XContentBuilder mappingBuilder) {
try { return source(Strings.toString(mappingBuilder), mappingBuilder.contentType());
return source(mappingBuilder.string(), mappingBuilder.contentType());
} catch (IOException e) {
throw new IllegalArgumentException("Failed to build json for mapping request", e);
}
} }
/** /**
@ -265,7 +261,7 @@ public class PutMappingRequest extends AcknowledgedRequest<PutMappingRequest> im
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(mappingSource); builder.map(mappingSource);
return source(builder.string(), XContentType.JSON); return source(Strings.toString(builder), XContentType.JSON);
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + mappingSource + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + mappingSource + "]", e);
} }

@ -24,6 +24,7 @@ import org.elasticsearch.action.ActionRequestValidationException;
import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.IndicesRequest;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.Settings;
@ -153,7 +154,7 @@ public class UpdateSettingsRequest extends AcknowledgedRequest<UpdateSettingsReq
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
settings(builder.string(), builder.contentType()); settings(Strings.toString(builder), builder.contentType());
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }

@ -22,6 +22,7 @@ package org.elasticsearch.action.admin.indices.stats;
import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.DefaultShardOperationFailedException;
import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.broadcast.BroadcastResponse;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentFragment;
@ -213,7 +214,7 @@ public class IndicesStatsResponse extends BroadcastResponse implements ToXConten
builder.startObject(); builder.startObject();
toXContent(builder, EMPTY_PARAMS); toXContent(builder, EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}"; return "{ \"error\" : \"" + e.getMessage() + "\"}";
} }

@ -28,6 +28,7 @@ import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.IndicesOptions;
import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.action.support.master.MasterNodeRequest;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
@ -196,7 +197,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
try { try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(source); builder.map(source);
settings(builder.string(), XContentType.JSON); settings(Strings.toString(builder), XContentType.JSON);
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }
@ -237,7 +238,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* @param source The mapping source * @param source The mapping source
*/ */
public PutIndexTemplateRequest mapping(String type, XContentBuilder source) { public PutIndexTemplateRequest mapping(String type, XContentBuilder source) {
return mapping(type, source.bytes(), source.contentType()); return mapping(type, BytesReference.bytes(source), source.contentType());
} }
/** /**
@ -295,7 +296,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
*/ */
public PutIndexTemplateRequest source(XContentBuilder templateBuilder) { public PutIndexTemplateRequest source(XContentBuilder templateBuilder) {
try { try {
return source(templateBuilder.bytes(), templateBuilder.contentType()); return source(BytesReference.bytes(templateBuilder), templateBuilder.contentType());
} catch (Exception e) { } catch (Exception e) {
throw new IllegalArgumentException("Failed to build json for template request", e); throw new IllegalArgumentException("Failed to build json for template request", e);
} }
@ -412,7 +413,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
try { try {
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
builder.map(source); builder.map(source);
return aliases(builder.bytes()); return aliases(BytesReference.bytes(builder));
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e);
} }
@ -422,7 +423,7 @@ public class PutIndexTemplateRequest extends MasterNodeRequest<PutIndexTemplateR
* Sets the aliases that will be associated with the index when it gets created * Sets the aliases that will be associated with the index when it gets created
*/ */
public PutIndexTemplateRequest aliases(XContentBuilder source) { public PutIndexTemplateRequest aliases(XContentBuilder source) {
return aliases(source.bytes()); return aliases(BytesReference.bytes(source));
} }
/** /**

@ -43,7 +43,6 @@ import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType; import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.shard.ShardId;
@ -75,7 +74,7 @@ import static org.elasticsearch.action.ValidateActions.addValidationError;
public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implements DocWriteRequest<IndexRequest>, CompositeIndicesRequest { public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implements DocWriteRequest<IndexRequest>, CompositeIndicesRequest {
/** /**
* Max length of the source document to include into toString() * Max length of the source document to include into string()
* *
* @see ReplicationRequest#createTask * @see ReplicationRequest#createTask
*/ */
@ -332,7 +331,7 @@ public class IndexRequest extends ReplicatedWriteRequest<IndexRequest> implement
* Sets the content source to index. * Sets the content source to index.
*/ */
public IndexRequest source(XContentBuilder sourceBuilder) { public IndexRequest source(XContentBuilder sourceBuilder) {
return source(sourceBuilder.bytes(), sourceBuilder.contentType()); return source(BytesReference.bytes(sourceBuilder), sourceBuilder.contentType());
} }
/** /**

@ -306,7 +306,7 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults()); xContentBuilder.field("allow_partial_search_results", request.allowPartialSearchResults());
} }
xContentBuilder.endObject(); xContentBuilder.endObject();
xContentBuilder.bytes().writeTo(output); BytesReference.bytes(xContentBuilder).writeTo(output);
} }
output.write(xContent.streamSeparator()); output.write(xContent.streamSeparator());
try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) { try (XContentBuilder xContentBuilder = XContentBuilder.builder(xContent)) {
@ -316,7 +316,7 @@ public class MultiSearchRequest extends ActionRequest implements CompositeIndice
xContentBuilder.startObject(); xContentBuilder.startObject();
xContentBuilder.endObject(); xContentBuilder.endObject();
} }
xContentBuilder.bytes().writeTo(output); BytesReference.bytes(xContentBuilder).writeTo(output);
} }
output.write(xContent.streamSeparator()); output.write(xContent.streamSeparator());
} }

@ -256,7 +256,7 @@ public class TermVectorsRequest extends SingleShardRequest<TermVectorsRequest> i
* Sets an artificial document from which term vectors are requested for. * Sets an artificial document from which term vectors are requested for.
*/ */
public TermVectorsRequest doc(XContentBuilder documentBuilder) { public TermVectorsRequest doc(XContentBuilder documentBuilder) {
return this.doc(documentBuilder.bytes(), true, documentBuilder.contentType()); return this.doc(BytesReference.bytes(documentBuilder), true, documentBuilder.contentType());
} }
/** /**

@ -259,7 +259,8 @@ public class TermVectorsResponse extends ActionResponse implements ToXContentObj
builder.field(FieldStrings.END_OFFSET, currentEndOffset[i]); builder.field(FieldStrings.END_OFFSET, currentEndOffset[i]);
} }
if (curTerms.hasPayloads() && (currentPayloads[i].length() > 0)) { if (curTerms.hasPayloads() && (currentPayloads[i].length() > 0)) {
builder.field(FieldStrings.PAYLOAD, currentPayloads[i]); BytesRef bytesRef = currentPayloads[i].toBytesRef();
builder.field(FieldStrings.PAYLOAD, bytesRef.bytes, bytesRef.offset, bytesRef.length);
} }
builder.endObject(); builder.endObject();
} }

@ -356,7 +356,7 @@ public class UpdateHelper extends AbstractComponent {
BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity); BytesStreamOutput streamOutput = new BytesStreamOutput(initialCapacity);
try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) { try (XContentBuilder builder = new XContentBuilder(sourceContentType.xContent(), streamOutput)) {
builder.value(value); builder.value(value);
sourceFilteredAsBytes = builder.bytes(); sourceFilteredAsBytes = BytesReference.bytes(builder);
} }
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchException("Error filtering source", e); throw new ElasticsearchException("Error filtering source", e);

@ -24,6 +24,7 @@ import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.Strings; import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -244,7 +245,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
} }
try { try {
XContentBuilder builder = XContentFactory.jsonBuilder().map(filter); XContentBuilder builder = XContentFactory.jsonBuilder().map(filter);
this.filter = new CompressedXContent(builder.bytes()); this.filter = new CompressedXContent(BytesReference.bytes(builder));
return this; return this;
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e); throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
@ -252,11 +253,7 @@ public class AliasMetaData extends AbstractDiffable<AliasMetaData> {
} }
public Builder filter(XContentBuilder filterBuilder) { public Builder filter(XContentBuilder filterBuilder) {
try { return filter(Strings.toString(filterBuilder));
return filter(filterBuilder.string());
} catch (IOException e) {
throw new ElasticsearchGenerationException("Failed to build json for alias request", e);
}
} }
public Builder routing(String routing) { public Builder routing(String routing) {

@ -25,6 +25,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.collect.MapBuilder;
@ -459,7 +460,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
String mappingType = currentFieldName; String mappingType = currentFieldName;
Map<String, Object> mappingSource = Map<String, Object> mappingSource =
MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map(); MapBuilder.<String, Object>newMapBuilder().put(mappingType, parser.mapOrdered()).map();
builder.putMapping(mappingType, XContentFactory.jsonBuilder().map(mappingSource).string()); builder.putMapping(mappingType, Strings.toString(XContentFactory.jsonBuilder().map(mappingSource)));
} }
} }
} else if ("aliases".equals(currentFieldName)) { } else if ("aliases".equals(currentFieldName)) {
@ -483,7 +484,7 @@ public class IndexTemplateMetaData extends AbstractDiffable<IndexTemplateMetaDat
Map<String, Object> mapping = parser.mapOrdered(); Map<String, Object> mapping = parser.mapOrdered();
if (mapping.size() == 1) { if (mapping.size() == 1) {
String mappingType = mapping.keySet().iterator().next(); String mappingType = mapping.keySet().iterator().next();
String mappingSource = XContentFactory.jsonBuilder().map(mapping).string(); String mappingSource = Strings.toString(XContentFactory.jsonBuilder().map(mapping));
if (mappingSource == null) { if (mappingSource == null) {
// crap, no mapping source, warn? // crap, no mapping source, warn?

@ -23,6 +23,7 @@ import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.AbstractDiffable;
import org.elasticsearch.cluster.Diff; import org.elasticsearch.cluster.Diff;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
@ -99,7 +100,7 @@ public class MappingMetaData extends AbstractDiffable<MappingMetaData> {
public MappingMetaData(String type, Map<String, Object> mapping) throws IOException { public MappingMetaData(String type, Map<String, Object> mapping) throws IOException {
this.type = type; this.type = type;
XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping); XContentBuilder mappingBuilder = XContentFactory.jsonBuilder().map(mapping);
this.source = new CompressedXContent(mappingBuilder.bytes()); this.source = new CompressedXContent(BytesReference.bytes(mappingBuilder));
Map<String, Object> withoutType = mapping; Map<String, Object> withoutType = mapping;
if (mapping.size() == 1 && mapping.containsKey(type)) { if (mapping.size() == 1 && mapping.containsKey(type)) {
withoutType = (Map<String, Object>) mapping.get(type); withoutType = (Map<String, Object>) mapping.get(type);

@ -1081,7 +1081,7 @@ public class MetaData implements Iterable<IndexMetaData>, Diffable<MetaData>, To
builder.startObject(); builder.startObject();
toXContent(metaData, builder, ToXContent.EMPTY_PARAMS); toXContent(metaData, builder, ToXContent.EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} }
public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params params) throws IOException { public static void toXContent(MetaData metaData, XContentBuilder builder, ToXContent.Params params) throws IOException {

@ -755,6 +755,14 @@ public class Strings {
return toString(toXContent, false, false); return toString(toXContent, false, false);
} }
/**
* Returns a string representation of the builder (only applicable for text based xcontent).
* @param xContentBuilder
*/
public static String toString(XContentBuilder xContentBuilder) {
return BytesReference.bytes(xContentBuilder).utf8ToString();
}
/** /**
* Return a {@link String} that is the json representation of the provided {@link ToXContent}. * Return a {@link String} that is the json representation of the provided {@link ToXContent}.
* Wraps the output into an anonymous object if needed. Allows to control whether the outputted * Wraps the output into an anonymous object if needed. Allows to control whether the outputted
@ -771,7 +779,7 @@ public class Strings {
if (toXContent.isFragment()) { if (toXContent.isFragment()) {
builder.endObject(); builder.endObject();
} }
return builder.string(); return toString(builder);
} catch (IOException e) { } catch (IOException e) {
try { try {
XContentBuilder builder = createBuilder(pretty, human); XContentBuilder builder = createBuilder(pretty, human);
@ -779,7 +787,7 @@ public class Strings {
builder.field("error", "error building toString out of XContent: " + e.getMessage()); builder.field("error", "error building toString out of XContent: " + e.getMessage());
builder.field("stack_trace", ExceptionsHelper.stackTrace(e)); builder.field("stack_trace", ExceptionsHelper.stackTrace(e));
builder.endObject(); builder.endObject();
return builder.string(); return toString(builder);
} catch (IOException e2) { } catch (IOException e2) {
throw new ElasticsearchException("cannot generate error message for deserialization", e); throw new ElasticsearchException("cannot generate error message for deserialization", e);
} }
@ -845,5 +853,4 @@ public class Strings {
return sb.toString(); return sb.toString();
} }
} }
} }

@ -21,8 +21,11 @@ package org.elasticsearch.common.bytes;
import org.apache.lucene.util.Accountable; import org.apache.lucene.util.Accountable;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.BytesRefIterator; import org.apache.lucene.util.BytesRefIterator;
import org.elasticsearch.common.io.stream.BytesStream;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.xcontent.XContentBuilder;
import java.io.ByteArrayOutputStream;
import java.io.EOFException; import java.io.EOFException;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream; import java.io.InputStream;
@ -38,6 +41,20 @@ public abstract class BytesReference implements Accountable, Comparable<BytesRef
private Integer hash = null; // we cache the hash of this reference since it can be quite costly to re-calculated it private Integer hash = null; // we cache the hash of this reference since it can be quite costly to re-calculated it
/**
* Convert an {@link XContentBuilder} into a BytesReference. This method closes the builder,
* so no further fields may be added.
*/
public static BytesReference bytes(XContentBuilder xContentBuilder) {
xContentBuilder.close();
OutputStream stream = xContentBuilder.getOutputStream();
if (stream instanceof ByteArrayOutputStream) {
return new BytesArray(((ByteArrayOutputStream) stream).toByteArray());
} else {
return ((BytesStream) stream).bytes();
}
}
/** /**
* Returns the byte at the specified index. Need to be between 0 and length. * Returns the byte at the specified index. Need to be between 0 and length.
*/ */

@ -19,6 +19,7 @@
package org.elasticsearch.common.document; package org.elasticsearch.common.document;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
@ -124,9 +125,13 @@ public class DocumentField implements Streamable, ToXContentFragment, Iterable<O
for (Object value : values) { for (Object value : values) {
// this call doesn't really need to support writing any kind of object. // this call doesn't really need to support writing any kind of object.
// Stored fields values are converted using MappedFieldType#valueForDisplay. // Stored fields values are converted using MappedFieldType#valueForDisplay.
// As a result they can either be Strings, Numbers, Booleans, or BytesReference, that's // As a result they can either be Strings, Numbers, or Booleans, that's
// all. // all.
builder.value(value); if (value instanceof BytesReference) {
builder.binaryValue(((BytesReference) value).toBytesRef());
} else {
builder.value(value);
}
} }
builder.endArray(); builder.endArray();
return builder; return builder;
@ -168,4 +173,4 @@ public class DocumentField implements Streamable, ToXContentFragment, Iterable<O
", values=" + values + ", values=" + values +
'}'; '}';
} }
} }

@ -790,7 +790,7 @@ public class Setting<T> implements ToXContentObject {
builder.startObject(); builder.startObject();
subSettings.toXContent(builder, EMPTY_PARAMS); subSettings.toXContent(builder, EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }
@ -1172,7 +1172,7 @@ public class Setting<T> implements ToXContentObject {
builder.value(element); builder.value(element);
} }
builder.endArray(); builder.endArray();
return builder.string(); return Strings.toString(builder);
} catch (IOException ex) { } catch (IOException ex) {
throw new ElasticsearchException(ex); throw new ElasticsearchException(ex);
} }

@ -1442,7 +1442,7 @@ public final class Settings implements ToXContentFragment {
builder.startObject(); builder.startObject();
toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true"))); toXContent(builder, new MapParams(Collections.singletonMap("flat_settings", "true")));
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
throw new UncheckedIOException(e); throw new UncheckedIOException(e);
} }

@ -20,6 +20,7 @@
package org.elasticsearch.common.settings; package org.elasticsearch.common.settings;
import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.inject.Binder; import org.elasticsearch.common.inject.Binder;
import org.elasticsearch.common.inject.Module; import org.elasticsearch.common.inject.Module;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
@ -117,7 +118,7 @@ public class SettingsModule implements Module {
xContentBuilder.startObject(); xContentBuilder.startObject();
indexSettings.toXContent(xContentBuilder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true"))); indexSettings.toXContent(xContentBuilder, new ToXContent.MapParams(Collections.singletonMap("flat_settings", "true")));
xContentBuilder.endObject(); xContentBuilder.endObject();
builder.append(xContentBuilder.string()); builder.append(Strings.toString(xContentBuilder));
} }
builder.append("'"); builder.append("'");
builder.append(System.lineSeparator()); builder.append(System.lineSeparator());

@ -219,7 +219,7 @@ public abstract class AbstractObjectParser<Value, Context>
try (XContentBuilder builder = JsonXContent.contentBuilder()) { try (XContentBuilder builder = JsonXContent.contentBuilder()) {
builder.prettyPrint(); builder.prettyPrint();
builder.copyCurrentStructure(p); builder.copyCurrentStructure(p);
return builder.bytes(); return BytesReference.bytes(builder);
} }
}; };
declareField(consumer, bytesParser, field, ValueType.OBJECT); declareField(consumer, bytesParser, field, ValueType.OBJECT);

@ -20,10 +20,7 @@
package org.elasticsearch.common.xcontent; package org.elasticsearch.common.xcontent;
import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.GeoPoint;
import org.elasticsearch.common.io.stream.BytesStream;
import org.elasticsearch.common.lease.Releasable; import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.text.Text; import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.ByteSizeValue;
@ -173,6 +170,13 @@ public final class XContentBuilder implements Releasable, Flushable {
return generator.contentType(); return generator.contentType();
} }
/**
* @return the output stream to which the built object is being written. Note that is dangerous to modify the stream.
*/
public OutputStream getOutputStream() {
return bos;
}
public XContentBuilder prettyPrint() { public XContentBuilder prettyPrint() {
generator.usePrettyPrint(); generator.usePrettyPrint();
return this; return this;
@ -626,24 +630,6 @@ public final class XContentBuilder implements Releasable, Flushable {
return this; return this;
} }
/**
* Writes the binary content of the given {@link BytesReference}.
*
* Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back
*/
public XContentBuilder field(String name, BytesReference value) throws IOException {
return field(name).value(value);
}
/**
* Writes the binary content of the given {@link BytesReference}.
*
* Use {@link org.elasticsearch.common.xcontent.XContentParser#binaryValue()} to read the value back
*/
public XContentBuilder value(BytesReference value) throws IOException {
return (value == null) ? nullValue() : binaryValue(value.toBytesRef());
}
//////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////
// Text // Text
////////////////////////////////// //////////////////////////////////
@ -810,8 +796,6 @@ public final class XContentBuilder implements Releasable, Flushable {
value((Calendar) value); value((Calendar) value);
} else if (value instanceof ReadableInstant) { } else if (value instanceof ReadableInstant) {
value((ReadableInstant) value); value((ReadableInstant) value);
} else if (value instanceof BytesReference) {
value((BytesReference) value);
} else if (value instanceof ToXContent) { } else if (value instanceof ToXContent) {
value((ToXContent) value); value((ToXContent) value);
} else { } else {
@ -982,28 +966,6 @@ public final class XContentBuilder implements Releasable, Flushable {
return this; return this;
} }
/**
* Writes a raw field with the given bytes as the value
* @deprecated use {@link #rawField(String name, BytesReference, XContentType)} to avoid content type auto-detection
*/
@Deprecated
public XContentBuilder rawField(String name, BytesReference value) throws IOException {
try (InputStream stream = value.streamInput()) {
generator.writeRawField(name, stream);
}
return this;
}
/**
* Writes a raw field with the given bytes as the value
*/
public XContentBuilder rawField(String name, BytesReference value, XContentType contentType) throws IOException {
try (InputStream stream = value.streamInput()) {
generator.writeRawField(name, stream, contentType);
}
return this;
}
/** /**
* Writes a value with the source coming directly from the bytes in the stream * Writes a value with the source coming directly from the bytes in the stream
*/ */
@ -1035,22 +997,6 @@ public final class XContentBuilder implements Releasable, Flushable {
return this.generator; return this.generator;
} }
public BytesReference bytes() {
close();
if (bos instanceof ByteArrayOutputStream) {
return new BytesArray(((ByteArrayOutputStream) bos).toByteArray());
} else {
return ((BytesStream) bos).bytes();
}
}
/**
* Returns a string representation of the builder (only applicable for text based xcontent).
*/
public String string() throws IOException {
return bytes().utf8ToString();
}
static void ensureNameNotNull(String name) { static void ensureNameNotNull(String name) {
ensureNotNull(name, "Field name cannot be null"); ensureNotNull(name, "Field name cannot be null");
} }

@ -20,6 +20,7 @@
package org.elasticsearch.common.xcontent; package org.elasticsearch.common.xcontent;
import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.compress.Compressor; import org.elasticsearch.common.compress.Compressor;
@ -174,7 +175,7 @@ public class XContentHelper {
builder.prettyPrint(); builder.prettyPrint();
} }
builder.copyCurrentStructure(parser); builder.copyCurrentStructure(parser);
return builder.string(); return Strings.toString(builder);
} }
} }
@ -371,7 +372,7 @@ public class XContentHelper {
/** /**
* Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using
* {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference)}. * {@link XContentBuilder#rawField(String, InputStream)}.
* @deprecated use {@link #writeRawField(String, BytesReference, XContentType, XContentBuilder, Params)} to avoid content type * @deprecated use {@link #writeRawField(String, BytesReference, XContentType, XContentBuilder, Params)} to avoid content type
* auto-detection * auto-detection
*/ */
@ -383,13 +384,15 @@ public class XContentHelper {
builder.rawField(field, compressedStreamInput); builder.rawField(field, compressedStreamInput);
} }
} else { } else {
builder.rawField(field, source); try (InputStream stream = source.streamInput()) {
builder.rawField(field, stream);
}
} }
} }
/** /**
* Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using * Writes a "raw" (bytes) field, handling cases where the bytes are compressed, and tries to optimize writing using
* {@link XContentBuilder#rawField(String, org.elasticsearch.common.bytes.BytesReference, XContentType)}. * {@link XContentBuilder#rawField(String, InputStream, XContentType)}.
*/ */
public static void writeRawField(String field, BytesReference source, XContentType xContentType, XContentBuilder builder, public static void writeRawField(String field, BytesReference source, XContentType xContentType, XContentBuilder builder,
ToXContent.Params params) throws IOException { ToXContent.Params params) throws IOException {
@ -400,7 +403,9 @@ public class XContentHelper {
builder.rawField(field, compressedStreamInput, xContentType); builder.rawField(field, compressedStreamInput, xContentType);
} }
} else { } else {
builder.rawField(field, source, xContentType); try (InputStream stream = source.streamInput()) {
builder.rawField(field, stream, xContentType);
}
} }
} }
@ -428,7 +433,7 @@ public class XContentHelper {
if (toXContent.isFragment()) { if (toXContent.isFragment()) {
builder.endObject(); builder.endObject();
} }
return builder.bytes(); return BytesReference.bytes(builder);
} }
} }
} }

@ -229,7 +229,6 @@ public interface XContentParser extends Closeable {
* *
* <ul> * <ul>
* <li>{@link XContentBuilder#field(String, org.apache.lucene.util.BytesRef)}</li> * <li>{@link XContentBuilder#field(String, org.apache.lucene.util.BytesRef)}</li>
* <li>{@link XContentBuilder#field(String, org.elasticsearch.common.bytes.BytesReference)}</li>
* <li>{@link XContentBuilder#field(String, byte[], int, int)}}</li> * <li>{@link XContentBuilder#field(String, byte[], int, int)}}</li>
* <li>{@link XContentBuilder#field(String, byte[])}}</li> * <li>{@link XContentBuilder#field(String, byte[])}}</li>
* </ul> * </ul>

@ -304,7 +304,7 @@ public class GetResult implements Streamable, Iterable<DocumentField>, ToXConten
//the original document gets slightly modified: whitespaces or pretty printing are not preserved, //the original document gets slightly modified: whitespaces or pretty printing are not preserved,
//it all depends on the current builder settings //it all depends on the current builder settings
builder.copyCurrentStructure(parser); builder.copyCurrentStructure(parser);
source = builder.bytes(); source = BytesReference.bytes(builder);
} }
} else if (FIELDS.equals(currentFieldName)) { } else if (FIELDS.equals(currentFieldName)) {
while(parser.nextToken() != XContentParser.Token.END_OBJECT) { while(parser.nextToken() != XContentParser.Token.END_OBJECT) {

@ -227,7 +227,7 @@ public final class ShardGetService extends AbstractIndexShardComponent {
sourceAsMap = typeMapTuple.v2(); sourceAsMap = typeMapTuple.v2();
sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes()); sourceAsMap = XContentMapValues.filter(sourceAsMap, fetchSourceContext.includes(), fetchSourceContext.excludes());
try { try {
source = XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap).bytes(); source = BytesReference.bytes(XContentFactory.contentBuilder(sourceContentType).map(sourceAsMap));
} catch (IOException e) { } catch (IOException e) {
throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e); throw new ElasticsearchException("Failed to get type [" + type + "] and id [" + id + "] with includes/excludes set", e);
} }

@ -31,6 +31,7 @@ import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MappingMetaData; import org.elasticsearch.cluster.metadata.MappingMetaData;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.logging.Loggers;
@ -256,7 +257,7 @@ public class MapperService extends AbstractIndexComponent implements Closeable {
Map<String, CompressedXContent> mappingSourcesCompressed = new LinkedHashMap<>(mappings.size()); Map<String, CompressedXContent> mappingSourcesCompressed = new LinkedHashMap<>(mappings.size());
for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) { for (Map.Entry<String, Map<String, Object>> entry : mappings.entrySet()) {
try { try {
mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(XContentFactory.jsonBuilder().map(entry.getValue()).string())); mappingSourcesCompressed.put(entry.getKey(), new CompressedXContent(Strings.toString(XContentFactory.jsonBuilder().map(entry.getValue()))));
} catch (Exception e) { } catch (Exception e) {
throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage()); throw new MapperParsingException("Failed to parse mapping [{}]: {}", e, entry.getKey(), e.getMessage());
} }

@ -20,6 +20,7 @@
package org.elasticsearch.index.mapper; package org.elasticsearch.index.mapper;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
@ -146,7 +147,7 @@ public final class Mapping implements ToXContentFragment {
try { try {
XContentBuilder builder = XContentFactory.jsonBuilder().startObject(); XContentBuilder builder = XContentFactory.jsonBuilder().startObject();
toXContent(builder, new ToXContent.MapParams(emptyMap())); toXContent(builder, new ToXContent.MapParams(emptyMap()));
return builder.endObject().string(); return Strings.toString(builder.endObject());
} catch (IOException bogus) { } catch (IOException bogus) {
throw new UncheckedIOException(bogus); throw new UncheckedIOException(bogus);
} }

@ -57,6 +57,7 @@ import org.elasticsearch.index.mapper.TextFieldMapper.TextFieldType;
import org.elasticsearch.index.mapper.UidFieldMapper; import org.elasticsearch.index.mapper.UidFieldMapper;
import java.io.IOException; import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashSet; import java.util.HashSet;
@ -208,7 +209,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
} }
this.index = index; this.index = index;
this.type = type; this.type = type;
this.doc = doc.bytes(); this.doc = BytesReference.bytes(doc);
this.xContentType = doc.contentType(); this.xContentType = doc.contentType();
} }
@ -373,7 +374,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
} else if (ID.match(currentFieldName, parser.getDeprecationHandler())) { } else if (ID.match(currentFieldName, parser.getDeprecationHandler())) {
item.id = parser.text(); item.id = parser.text();
} else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) { } else if (DOC.match(currentFieldName, parser.getDeprecationHandler())) {
item.doc = jsonBuilder().copyCurrentStructure(parser).bytes(); item.doc = BytesReference.bytes(jsonBuilder().copyCurrentStructure(parser));
item.xContentType = XContentType.JSON; item.xContentType = XContentType.JSON;
} else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) { } else if (FIELDS.match(currentFieldName, parser.getDeprecationHandler())) {
if (token == XContentParser.Token.START_ARRAY) { if (token == XContentParser.Token.START_ARRAY) {
@ -424,7 +425,9 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
builder.field(ID.getPreferredName(), this.id); builder.field(ID.getPreferredName(), this.id);
} }
if (this.doc != null) { if (this.doc != null) {
builder.rawField(DOC.getPreferredName(), this.doc, xContentType); try (InputStream stream = this.doc.streamInput()) {
builder.rawField(DOC.getPreferredName(), stream, xContentType);
}
} }
if (this.fields != null) { if (this.fields != null) {
builder.array(FIELDS.getPreferredName(), this.fields); builder.array(FIELDS.getPreferredName(), this.fields);
@ -450,7 +453,7 @@ public class MoreLikeThisQueryBuilder extends AbstractQueryBuilder<MoreLikeThisQ
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
builder.prettyPrint(); builder.prettyPrint();
toXContent(builder, EMPTY_PARAMS); toXContent(builder, EMPTY_PARAMS);
return builder.string(); return Strings.toString(builder);
} catch (Exception e) { } catch (Exception e) {
return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}"; return "{ \"error\" : \"" + ExceptionsHelper.detailedMessage(e) + "\"}";
} }

@ -104,7 +104,7 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
} }
builder.field(DECAY, decay); builder.field(DECAY, decay);
builder.endObject(); builder.endObject();
this.functionBytes = builder.bytes(); this.functionBytes = BytesReference.bytes(builder);
} catch (IOException e) { } catch (IOException e) {
throw new IllegalArgumentException("unable to build inner function object",e); throw new IllegalArgumentException("unable to build inner function object",e);
} }
@ -149,7 +149,9 @@ public abstract class DecayFunctionBuilder<DFB extends DecayFunctionBuilder<DFB>
@Override @Override
public void doXContent(XContentBuilder builder, Params params) throws IOException { public void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(getName()); builder.startObject(getName());
builder.rawField(fieldName, functionBytes); try (InputStream stream = functionBytes.streamInput()) {
builder.rawField(fieldName, stream);
}
builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name()); builder.field(DecayFunctionParser.MULTI_VALUE_MODE.getPreferredName(), multiValueMode.name());
builder.endObject(); builder.endObject();
} }

@ -109,7 +109,7 @@ public final class DecayFunctionParser<DFB extends DecayFunctionBuilder<DFB>> im
fieldName = currentFieldName; fieldName = currentFieldName;
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
builder.copyCurrentStructure(parser); builder.copyCurrentStructure(parser);
functionBytes = builder.bytes(); functionBytes = BytesReference.bytes(builder);
} else if (MULTI_VALUE_MODE.match(currentFieldName, parser.getDeprecationHandler())) { } else if (MULTI_VALUE_MODE.match(currentFieldName, parser.getDeprecationHandler())) {
multiValueMode = MultiValueMode.fromString(parser.text()); multiValueMode = MultiValueMode.fromString(parser.text());
} else { } else {

@ -23,6 +23,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RecoverySource;
import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Streamable; import org.elasticsearch.common.io.stream.Streamable;
@ -936,7 +937,7 @@ public class RecoveryState implements ToXContentFragment, Streamable {
builder.startObject(); builder.startObject();
toXContent(builder, EMPTY_PARAMS); toXContent(builder, EMPTY_PARAMS);
builder.endObject(); builder.endObject();
return builder.string(); return Strings.toString(builder);
} catch (IOException e) { } catch (IOException e) {
return "{ \"error\" : \"" + e.getMessage() + "\"}"; return "{ \"error\" : \"" + e.getMessage() + "\"}";
} }

@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ContextParser; import org.elasticsearch.common.xcontent.ContextParser;
import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ObjectParser;
import org.elasticsearch.common.xcontent.ToXContent.Params;
import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentFactory;
@ -50,7 +49,7 @@ public final class PipelineConfiguration extends AbstractDiffable<PipelineConfig
PARSER.declareField((parser, builder, aVoid) -> { PARSER.declareField((parser, builder, aVoid) -> {
XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent()); XContentBuilder contentBuilder = XContentBuilder.builder(parser.contentType().xContent());
XContentHelper.copyCurrentStructure(contentBuilder.generator(), parser); XContentHelper.copyCurrentStructure(contentBuilder.generator(), parser);
builder.setConfig(contentBuilder.bytes(), contentBuilder.contentType()); builder.setConfig(BytesReference.bytes(contentBuilder), contentBuilder.contentType());
}, new ParseField("config"), ObjectParser.ValueType.OBJECT); }, new ParseField("config"), ObjectParser.ValueType.OBJECT);
} }

@ -54,7 +54,7 @@ public class BytesRestResponse extends RestResponse {
* Creates a new response based on {@link XContentBuilder}. * Creates a new response based on {@link XContentBuilder}.
*/ */
public BytesRestResponse(RestStatus status, XContentBuilder builder) { public BytesRestResponse(RestStatus status, XContentBuilder builder) {
this(status, builder.contentType().mediaType(), builder.bytes()); this(status, builder.contentType().mediaType(), BytesReference.bytes(builder));
} }
/** /**
@ -94,7 +94,7 @@ public class BytesRestResponse extends RestResponse {
public BytesRestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException { public BytesRestResponse(RestChannel channel, RestStatus status, Exception e) throws IOException {
this.status = status; this.status = status;
try (XContentBuilder builder = build(channel, status, e)) { try (XContentBuilder builder = build(channel, status, e)) {
this.content = builder.bytes(); this.content = BytesReference.bytes(builder);
this.contentType = builder.contentType().mediaType(); this.contentType = builder.contentType().mediaType();
} }
if (e instanceof ElasticsearchException) { if (e instanceof ElasticsearchException) {

@ -21,7 +21,9 @@ package org.elasticsearch.script;
import org.elasticsearch.Version; import org.elasticsearch.Version;
import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.io.stream.Writeable;
@ -159,7 +161,7 @@ public final class Script implements ToXContentObject, Writeable {
if (parser.currentToken() == Token.START_OBJECT) { if (parser.currentToken() == Token.START_OBJECT) {
//this is really for search templates, that need to be converted to json format //this is really for search templates, that need to be converted to json format
XContentBuilder builder = XContentFactory.jsonBuilder(); XContentBuilder builder = XContentFactory.jsonBuilder();
idOrCode = builder.copyCurrentStructure(parser).string(); idOrCode = Strings.toString(builder.copyCurrentStructure(parser));
options.put(CONTENT_TYPE_OPTION, XContentType.JSON.mediaType()); options.put(CONTENT_TYPE_OPTION, XContentType.JSON.mediaType());
} else { } else {
idOrCode = parser.text(); idOrCode = parser.text();
@ -283,7 +285,7 @@ public final class Script implements ToXContentObject, Writeable {
builder.startObject(); builder.startObject();
settings.toXContent(builder, ToXContent.EMPTY_PARAMS); settings.toXContent(builder, ToXContent.EMPTY_PARAMS);
builder.endObject(); builder.endObject();
try (InputStream stream = builder.bytes().streamInput(); try (InputStream stream = BytesReference.bytes(builder).streamInput();
XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY,
LoggingDeprecationHandler.INSTANCE, stream)) { LoggingDeprecationHandler.INSTANCE, stream)) {
return parse(parser); return parse(parser);
@ -639,7 +641,9 @@ public final class Script implements ToXContentObject, Writeable {
if (type == ScriptType.INLINE) { if (type == ScriptType.INLINE) {
if (contentType != null && builder.contentType().mediaType().equals(contentType)) { if (contentType != null && builder.contentType().mediaType().equals(contentType)) {
builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), new BytesArray(idOrCode)); try (InputStream stream = new BytesArray(idOrCode).streamInput()) {
builder.rawField(SOURCE_PARSE_FIELD.getPreferredName(), stream);
}
} else { } else {
builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode); builder.field(SOURCE_PARSE_FIELD.getPreferredName(), idOrCode);
} }

@ -26,6 +26,7 @@ import java.util.List;
import java.util.Objects; import java.util.Objects;
import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContent;
@ -49,11 +50,11 @@ public class ScriptException extends ElasticsearchException {
private final List<String> scriptStack; private final List<String> scriptStack;
private final String script; private final String script;
private final String lang; private final String lang;
/** /**
* Create a new ScriptException. * Create a new ScriptException.
* @param message A short and simple summary of what happened, such as "compile error". * @param message A short and simple summary of what happened, such as "compile error".
* Must not be {@code null}. * Must not be {@code null}.
* @param cause The underlying cause of the exception. Must not be {@code null}. * @param cause The underlying cause of the exception. Must not be {@code null}.
* @param scriptStack An implementation-specific "stacktrace" for the error in the script. * @param scriptStack An implementation-specific "stacktrace" for the error in the script.
* Must not be {@code null}, but can be empty (though this should be avoided if possible). * Must not be {@code null}, but can be empty (though this should be avoided if possible).
@ -85,7 +86,7 @@ public class ScriptException extends ElasticsearchException {
out.writeString(script); out.writeString(script);
out.writeString(lang); out.writeString(lang);
} }
@Override @Override
protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException { protected void metadataToXContent(XContentBuilder builder, Params params) throws IOException {
builder.field("script_stack", scriptStack); builder.field("script_stack", scriptStack);
@ -100,7 +101,7 @@ public class ScriptException extends ElasticsearchException {
public List<String> getScriptStack() { public List<String> getScriptStack() {
return scriptStack; return scriptStack;
} }
/** /**
* Returns the identifier for which script. * Returns the identifier for which script.
* @return script's name or source text that identifies the script. * @return script's name or source text that identifies the script.
@ -108,7 +109,7 @@ public class ScriptException extends ElasticsearchException {
public String getScript() { public String getScript() {
return script; return script;
} }
/** /**
* Returns the language of the script. * Returns the language of the script.
* @return the {@code lang} parameter of the scripting engine. * @return the {@code lang} parameter of the scripting engine.
@ -117,7 +118,7 @@ public class ScriptException extends ElasticsearchException {
return lang; return lang;
} }
/** /**
* Returns a JSON version of this exception for debugging. * Returns a JSON version of this exception for debugging.
*/ */
public String toJsonString() { public String toJsonString() {
@ -126,7 +127,7 @@ public class ScriptException extends ElasticsearchException {
json.startObject(); json.startObject();
toXContent(json, ToXContent.EMPTY_PARAMS); toXContent(json, ToXContent.EMPTY_PARAMS);
json.endObject(); json.endObject();
return json.string(); return Strings.toString(json);
} catch (IOException e) { } catch (IOException e) {
throw new RuntimeException(e); throw new RuntimeException(e);
} }

Some files were not shown because too many files have changed in this diff Show More